@loaders.gl/i3s 4.4.0-alpha.2 → 4.4.0-alpha.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -1
- package/dist/arcgis-webscene-loader.js +2 -1
- package/dist/arcgis-webscene-loader.js.map +1 -0
- package/dist/dist.dev.js +402 -298
- package/dist/dist.min.js +1 -1
- package/dist/i3s-attribute-loader.js +2 -1
- package/dist/i3s-attribute-loader.js.map +1 -0
- package/dist/i3s-building-scene-layer-loader.js +2 -1
- package/dist/i3s-building-scene-layer-loader.js.map +1 -0
- package/dist/i3s-content-loader.js +2 -1
- package/dist/i3s-content-loader.js.map +1 -0
- package/dist/i3s-content-worker-node.js +45 -45
- package/dist/i3s-content-worker-node.js.map +4 -4
- package/dist/i3s-content-worker.js +396 -159
- package/dist/i3s-loader.d.ts +13 -8
- package/dist/i3s-loader.d.ts.map +1 -1
- package/dist/i3s-loader.js +8 -7
- package/dist/i3s-loader.js.map +1 -0
- package/dist/i3s-node-page-loader.js +2 -1
- package/dist/i3s-node-page-loader.js.map +1 -0
- package/dist/i3s-slpk-loader.d.ts +6 -1
- package/dist/i3s-slpk-loader.d.ts.map +1 -1
- package/dist/i3s-slpk-loader.js +10 -4
- package/dist/i3s-slpk-loader.js.map +1 -0
- package/dist/index.cjs +35 -27
- package/dist/index.cjs.map +4 -4
- package/dist/index.js +1 -0
- package/dist/index.js.map +1 -0
- package/dist/lib/helpers/i3s-nodepages-tiles.js +5 -4
- package/dist/lib/helpers/i3s-nodepages-tiles.js.map +1 -0
- package/dist/lib/parsers/constants.js +1 -0
- package/dist/lib/parsers/constants.js.map +1 -0
- package/dist/lib/parsers/parse-arcgis-webscene.js +1 -0
- package/dist/lib/parsers/parse-arcgis-webscene.js.map +1 -0
- package/dist/lib/parsers/parse-i3s-attribute.js +1 -0
- package/dist/lib/parsers/parse-i3s-attribute.js.map +1 -0
- package/dist/lib/parsers/parse-i3s-building-scene-layer.js +1 -0
- package/dist/lib/parsers/parse-i3s-building-scene-layer.js.map +1 -0
- package/dist/lib/parsers/parse-i3s-tile-content.d.ts +2 -2
- package/dist/lib/parsers/parse-i3s-tile-content.d.ts.map +1 -1
- package/dist/lib/parsers/parse-i3s-tile-content.js +1 -0
- package/dist/lib/parsers/parse-i3s-tile-content.js.map +1 -0
- package/dist/lib/parsers/parse-i3s.d.ts.map +1 -1
- package/dist/lib/parsers/parse-i3s.js +6 -4
- package/dist/lib/parsers/parse-i3s.js.map +1 -0
- package/dist/lib/parsers/parse-slpk/parse-slpk.d.ts +3 -3
- package/dist/lib/parsers/parse-slpk/parse-slpk.d.ts.map +1 -1
- package/dist/lib/parsers/parse-slpk/parse-slpk.js +4 -3
- package/dist/lib/parsers/parse-slpk/parse-slpk.js.map +1 -0
- package/dist/lib/parsers/parse-slpk/slpk-archieve.d.ts +3 -3
- package/dist/lib/parsers/parse-slpk/slpk-archieve.d.ts.map +1 -1
- package/dist/lib/parsers/parse-slpk/slpk-archieve.js +5 -4
- package/dist/lib/parsers/parse-slpk/slpk-archieve.js.map +1 -0
- package/dist/lib/utils/convert-i3s-obb-to-mbs.js +1 -0
- package/dist/lib/utils/convert-i3s-obb-to-mbs.js.map +1 -0
- package/dist/lib/utils/customize-colors.js +5 -2
- package/dist/lib/utils/customize-colors.js.map +1 -0
- package/dist/lib/utils/url-utils.js +1 -0
- package/dist/lib/utils/url-utils.js.map +1 -0
- package/dist/types.js +1 -0
- package/dist/types.js.map +1 -0
- package/dist/workers/i3s-content-worker-node.js +1 -0
- package/dist/workers/i3s-content-worker-node.js.map +1 -0
- package/dist/workers/i3s-content-worker.js +1 -0
- package/dist/workers/i3s-content-worker.js.map +1 -0
- package/package.json +18 -12
- package/src/i3s-loader.ts +15 -10
- package/src/i3s-slpk-loader.ts +8 -3
- package/src/lib/helpers/i3s-nodepages-tiles.ts +4 -4
- package/src/lib/parsers/parse-i3s-tile-content.ts +2 -2
- package/src/lib/parsers/parse-i3s.ts +7 -4
- package/src/lib/parsers/parse-slpk/parse-slpk.ts +7 -5
- package/src/lib/parsers/parse-slpk/slpk-archieve.ts +7 -10
- package/src/lib/utils/customize-colors.ts +4 -2
package/dist/dist.dev.js
CHANGED
@@ -9079,6 +9079,9 @@ var __exports__ = (() => {
 var matches = typeof process !== "undefined" && process.version && /v([0-9]*)/.exec(process.version);
 var nodeVersion = matches && parseFloat(matches[1]) || 0;

+// ../loader-utils/src/lib/javascript-utils/is-type.ts
+var isSharedArrayBuffer = (value) => typeof SharedArrayBuffer !== "undefined" && value instanceof SharedArrayBuffer;
+
 // ../loader-utils/src/lib/module-utils/js-module-utils.ts
 function registerJSModules(modules) {
 globalThis.loaders ||= {};
@@ -9094,14 +9097,16 @@ var __exports__ = (() => {
 var NPM_TAG = "beta";

 // ../worker-utils/src/lib/env-utils/version.ts
+var warningIssued = false;
 function getVersion() {
 if (!globalThis._loadersgl_?.version) {
 globalThis._loadersgl_ = globalThis._loadersgl_ || {};
-if (typeof __VERSION__ === "undefined") {
+if (typeof __VERSION__ === "undefined" && !warningIssued) {
 console.warn(
 "loaders.gl: The __VERSION__ variable is not injected using babel plugin. Latest unstable workers would be fetched from the CDN."
 );
 globalThis._loadersgl_.version = NPM_TAG;
+warningIssued = true;
 } else {
 globalThis._loadersgl_.version = __VERSION__;
 }
@@ -9148,6 +9153,9 @@ var __exports__ = (() => {
 return await loadLibraryPromises[libraryUrl];
 }
 function getLibraryUrl(library, moduleName, options = {}, libraryName = null) {
+if (options?.core) {
+throw new Error("loadLibrary: options.core must be pre-normalized");
+}
 if (!options.useLocalLibraries && library.startsWith("http")) {
 return library;
 }
@@ -9173,10 +9181,20 @@ var __exports__ = (() => {
 return await loadAsArrayBuffer(libraryUrl);
 }
 if (!isBrowser2) {
+const { requireFromFile } = globalThis.loaders || {};
 try {
-const
-
+const result = await requireFromFile?.(libraryUrl);
+if (result || !libraryUrl.includes("/dist/libs/")) {
+return result;
+}
+return await requireFromFile?.(libraryUrl.replace("/dist/libs/", "/src/libs/"));
 } catch (error) {
+if (libraryUrl.includes("/dist/libs/")) {
+try {
+return await requireFromFile?.(libraryUrl.replace("/dist/libs/", "/src/libs/"));
+} catch {
+}
+}
 console.error(error);
 return null;
 }
@@ -9212,7 +9230,14 @@ var __exports__ = (() => {
 const response = await fetch(url);
 return await response.arrayBuffer();
 }
-
+try {
+return await readFileAsArrayBuffer(url);
+} catch {
+if (url.includes("/dist/libs/")) {
+return await readFileAsArrayBuffer(url.replace("/dist/libs/", "/src/libs/"));
+}
+throw new Error(`Failed to load ArrayBuffer from ${url}`);
+}
 }
 async function loadAsText(url) {
 const { readFileAsText } = globalThis.loaders || {};
@@ -9220,7 +9245,14 @@ var __exports__ = (() => {
 const response = await fetch(url);
 return await response.text();
 }
-
+try {
+return await readFileAsText(url);
+} catch {
+if (url.includes("/dist/libs/")) {
+return await readFileAsText(url.replace("/dist/libs/", "/src/libs/"));
+}
+throw new Error(`Failed to load text from ${url}`);
+}
 }

 // ../loader-utils/src/lib/binary-utils/array-buffer-utils.ts
@@ -9259,10 +9291,26 @@ var __exports__ = (() => {
 async function concatenateArrayBuffersAsync(asyncIterator) {
 const arrayBuffers = [];
 for await (const chunk of asyncIterator) {
-arrayBuffers.push(chunk);
+arrayBuffers.push(copyToArrayBuffer(chunk));
 }
 return concatenateArrayBuffers(...arrayBuffers);
 }
+function copyToArrayBuffer(chunk) {
+if (chunk instanceof ArrayBuffer) {
+return chunk;
+}
+if (ArrayBuffer.isView(chunk)) {
+const { buffer, byteOffset, byteLength } = chunk;
+return copyFromBuffer(buffer, byteOffset, byteLength);
+}
+return copyFromBuffer(chunk);
+}
+function copyFromBuffer(buffer, byteOffset = 0, byteLength = buffer.byteLength - byteOffset) {
+const view = new Uint8Array(buffer, byteOffset, byteLength);
+const copy4 = new Uint8Array(view.length);
+copy4.set(view);
+return copy4.buffer;
+}

 // ../loader-utils/src/lib/node/buffer.browser.ts
 function toArrayBuffer(buffer) {
@@ -9280,11 +9328,15 @@ var __exports__ = (() => {
 if (data instanceof ArrayBuffer) {
 return data;
 }
+if (isSharedArrayBuffer(data)) {
+return copyToArrayBuffer2(data);
+}
 if (ArrayBuffer.isView(data)) {
+const buffer = data.buffer;
 if (data.byteOffset === 0 && data.byteLength === data.buffer.byteLength) {
-return
+return buffer;
 }
-return
+return buffer.slice(data.byteOffset, data.byteOffset + data.byteLength);
 }
 if (typeof data === "string") {
 const text = data;
@@ -9296,6 +9348,12 @@ var __exports__ = (() => {
 }
 throw new Error("toArrayBuffer");
 }
+function copyToArrayBuffer2(buffer, byteOffset = 0, byteLength = buffer.byteLength - byteOffset) {
+const view = new Uint8Array(buffer, byteOffset, byteLength);
+const copy4 = new Uint8Array(view.length);
+copy4.set(view);
+return copy4.buffer;
+}

 // ../loader-utils/src/lib/node/promisify.ts
 function promisify1(fn) {
@@ -9304,6 +9362,33 @@ var __exports__ = (() => {
 );
 }

+// ../loader-utils/src/lib/files/blob-file.ts
+var BlobFile = class {
+handle;
+size;
+bigsize;
+url;
+constructor(blob) {
+this.handle = blob instanceof ArrayBuffer ? new Blob([blob]) : blob;
+this.size = blob instanceof ArrayBuffer ? blob.byteLength : blob.size;
+this.bigsize = BigInt(this.size);
+this.url = blob instanceof File ? blob.name : "";
+}
+async close() {
+}
+async stat() {
+return {
+size: this.handle.size,
+bigsize: BigInt(this.handle.size),
+isDirectory: false
+};
+}
+async read(start, length4) {
+const arrayBuffer = await this.handle.slice(Number(start), Number(start) + Number(length4)).arrayBuffer();
+return arrayBuffer;
+}
+};
+
 // ../loader-utils/src/lib/files/node-file-facade.ts
 var NOT_IMPLEMENTED = new Error("Not implemented");
 var NodeFileFacade = class {
@@ -9345,163 +9430,6 @@ var __exports__ = (() => {
 }
 };

-// ../loader-utils/src/lib/file-provider/file-provider-interface.ts
-var isFileProvider = (fileProvider) => {
-return fileProvider?.getUint8 && fileProvider?.slice && fileProvider?.length;
-};
-
-// ../loader-utils/src/lib/file-provider/file-handle-file.ts
-var FileHandleFile = class {
-/** The FileHandle from which data is provided */
-file;
-/** Create a new FileHandleFile */
-constructor(path, append = false) {
-this.file = new NodeFileFacade(path, append ? "a+" : "r");
-}
-/**
-* Truncates the file descriptor.
-* @param length desired file lenght
-*/
-async truncate(length4) {
-await this.file.truncate(length4);
-}
-/**
-* Append data to a file.
-* @param buffer data to append
-*/
-async append(buffer) {
-await this.file.append(buffer);
-}
-/** Close file */
-async destroy() {
-await this.file.close();
-}
-/**
-* Gets an unsigned 8-bit integer at the specified byte offset from the start of the file.
-* @param offset The offset, in bytes, from the start of the file where to read the data.
-*/
-async getUint8(offset) {
-const arrayBuffer = await this.file.read(offset, 1);
-const val = new Uint8Array(arrayBuffer).at(0);
-if (val === void 0) {
-throw new Error("something went wrong");
-}
-return val;
-}
-/**
-* Gets an unsigned 16-bit integer at the specified byte offset from the start of the file.
-* @param offset The offset, in bytes, from the start of the file where to read the data.
-*/
-async getUint16(offset) {
-const arrayBuffer = await this.file.read(offset, 2);
-const val = new Uint16Array(arrayBuffer).at(0);
-if (val === void 0) {
-throw new Error("something went wrong");
-}
-return val;
-}
-/**
-* Gets an unsigned 32-bit integer at the specified byte offset from the start of the file.
-* @param offset The offset, in bytes, from the start of the file where to read the data.
-*/
-async getUint32(offset) {
-const arrayBuffer = await this.file.read(offset, 4);
-const val = new Uint32Array(arrayBuffer).at(0);
-if (val === void 0) {
-throw new Error("something went wrong");
-}
-return val;
-}
-/**
-* Gets an unsigned 32-bit integer at the specified byte offset from the start of the file.
-* @param offset The offset, in bytes, from the start of the file where to read the data.
-*/
-async getBigUint64(offset) {
-const arrayBuffer = await this.file.read(offset, 8);
-const val = new BigInt64Array(arrayBuffer).at(0);
-if (val === void 0) {
-throw new Error("something went wrong");
-}
-return val;
-}
-/**
-* returns an ArrayBuffer whose contents are a copy of this file bytes from startOffset, inclusive, up to endOffset, exclusive.
-* @param startOffset The offset, in byte, from the start of the file where to start reading the data.
-* @param endOffset The offset, in bytes, from the start of the file where to end reading the data.
-*/
-async slice(startOffset, endOffset) {
-const bigLength = endOffset - startOffset;
-if (bigLength > Number.MAX_SAFE_INTEGER) {
-throw new Error("too big slice");
-}
-const length4 = Number(bigLength);
-return await this.file.read(startOffset, length4);
-}
-/**
-* the length (in bytes) of the data.
-*/
-get length() {
-return this.file.bigsize;
-}
-};
-
-// ../loader-utils/src/lib/file-provider/data-view-file.ts
-var toNumber = (bigint) => {
-if (bigint > Number.MAX_SAFE_INTEGER) {
-throw new Error("Offset is out of bounds");
-}
-return Number(bigint);
-};
-var DataViewFile = class {
-/** The DataView from which data is provided */
-file;
-constructor(file) {
-this.file = file;
-}
-async destroy() {
-}
-/**
-* Gets an unsigned 8-bit integer at the specified byte offset from the start of the file.
-* @param offset The offset, in bytes, from the start of the file where to read the data.
-*/
-async getUint8(offset) {
-return this.file.getUint8(toNumber(offset));
-}
-/**
-* Gets an unsigned 16-bit intege at the specified byte offset from the start of the file.
-* @param offset The offset, in bytes, from the start of the file where to read the data.
-*/
-async getUint16(offset) {
-return this.file.getUint16(toNumber(offset), true);
-}
-/**
-* Gets an unsigned 32-bit integer at the specified byte offset from the start of the file.
-* @param offset The offset, in bytes, from the start of the file where to read the data.
-*/
-async getUint32(offset) {
-return this.file.getUint32(toNumber(offset), true);
-}
-/**
-* Gets an unsigned 64-bit integer at the specified byte offset from the start of the file.
-* @param offset The offset, in bytes, from the start of the file where to read the data.
-*/
-async getBigUint64(offset) {
-return this.file.getBigUint64(toNumber(offset), true);
-}
-/**
-* returns an ArrayBuffer whose contents are a copy of this file bytes from startOffset, inclusive, up to endOffset, exclusive.
-* @param startOffset The offset, in bytes, from the start of the file where to start reading the data.
-* @param endOffset The offset, in bytes, from the start of the file where to end reading the data.
-*/
-async slice(startOffset, endOffset) {
-return this.file.buffer.slice(toNumber(startOffset), toNumber(endOffset));
-}
-/** the length (in bytes) of the data. */
-get length() {
-return BigInt(this.file.byteLength);
-}
-};
-
 // ../images/src/lib/utils/version.ts
 var VERSION2 = typeof __VERSION__ !== "undefined" ? __VERSION__ : "latest";

@@ -9641,7 +9569,6 @@ var __exports__ = (() => {
 }

 // ../images/src/lib/parsers/parse-to-image-bitmap.ts
-var EMPTY_OBJECT = {};
 var imagebitmapOptionsSupported = true;
 async function parseToImageBitmap(arrayBuffer, options, url) {
 let blob;
@@ -9669,8 +9596,13 @@ var __exports__ = (() => {
 return await createImageBitmap(blob);
 }
 function isEmptyObject(object) {
-
-return
+if (!object) {
+return true;
+}
+for (const key in object) {
+if (Object.prototype.hasOwnProperty.call(object, key)) {
+return false;
+}
 }
 return true;
 }
@@ -9931,21 +9863,32 @@ var __exports__ = (() => {
 [DRACO_EXTERNAL_LIBRARIES.ENCODER]: `https://raw.githubusercontent.com/google/draco/${DRACO_ENCODER_VERSION}/javascript/${DRACO_EXTERNAL_LIBRARIES.ENCODER}`
 };
 var loadDecoderPromise;
-async function loadDracoDecoderModule(options) {
+async function loadDracoDecoderModule(options = {}, type) {
 const modules = options.modules || {};
 if (modules.draco3d) {
 loadDecoderPromise ||= modules.draco3d.createDecoderModule({}).then((draco) => {
 return { draco };
 });
 } else {
-loadDecoderPromise ||= loadDracoDecoder(options);
+loadDecoderPromise ||= loadDracoDecoder(options, type);
 }
 return await loadDecoderPromise;
 }
-
+function getLibraryExport(library, exportName) {
+if (library && typeof library === "object") {
+if (library.default) {
+return library.default;
+}
+if (library[exportName]) {
+return library[exportName];
+}
+}
+return library;
+}
+async function loadDracoDecoder(options, type) {
 let DracoDecoderModule;
 let wasmBinary;
-switch (
+switch (type) {
 case "js":
 DracoDecoderModule = await loadLibrary(
 DRACO_EXTERNAL_LIBRARY_URLS[DRACO_EXTERNAL_LIBRARIES.FALLBACK_DECODER],
@@ -9956,25 +9899,52 @@ var __exports__ = (() => {
 break;
 case "wasm":
 default:
-
-await
-
-
-
-
-
-
-
-
-
-
-
+try {
+[DracoDecoderModule, wasmBinary] = await Promise.all([
+await loadLibrary(
+DRACO_EXTERNAL_LIBRARY_URLS[DRACO_EXTERNAL_LIBRARIES.DECODER],
+"draco",
+options,
+DRACO_EXTERNAL_LIBRARIES.DECODER
+),
+await loadLibrary(
+DRACO_EXTERNAL_LIBRARY_URLS[DRACO_EXTERNAL_LIBRARIES.DECODER_WASM],
+"draco",
+options,
+DRACO_EXTERNAL_LIBRARIES.DECODER_WASM
+)
+]);
+} catch {
+DracoDecoderModule = null;
+wasmBinary = null;
+}
 }
+DracoDecoderModule = getLibraryExport(DracoDecoderModule, "DracoDecoderModule");
 DracoDecoderModule = DracoDecoderModule || globalThis.DracoDecoderModule;
+if (!DracoDecoderModule && !isBrowser2) {
+[DracoDecoderModule, wasmBinary] = await Promise.all([
+await loadLibrary(
+DRACO_EXTERNAL_LIBRARY_URLS[DRACO_EXTERNAL_LIBRARIES.DECODER],
+"draco",
+{ ...options, useLocalLibraries: true },
+DRACO_EXTERNAL_LIBRARIES.DECODER
+),
+await loadLibrary(
+DRACO_EXTERNAL_LIBRARY_URLS[DRACO_EXTERNAL_LIBRARIES.DECODER_WASM],
+"draco",
+{ ...options, useLocalLibraries: true },
+DRACO_EXTERNAL_LIBRARIES.DECODER_WASM
+)
+]);
+DracoDecoderModule = getLibraryExport(DracoDecoderModule, "DracoDecoderModule");
+DracoDecoderModule = DracoDecoderModule || globalThis.DracoDecoderModule;
+}
 return await initializeDracoDecoder(DracoDecoderModule, wasmBinary);
 }
 function initializeDracoDecoder(DracoDecoderModule, wasmBinary) {
+if (typeof DracoDecoderModule !== "function") {
+throw new Error("DracoDecoderModule could not be loaded");
+}
 const options = {};
 if (wasmBinary) {
 options.wasmBinary = wasmBinary;
@@ -10594,7 +10564,10 @@ var __exports__ = (() => {
 parse
 };
 async function parse(arrayBuffer, options) {
-const { draco } = await loadDracoDecoderModule(
+const { draco } = await loadDracoDecoderModule(
+options?.core,
+options?.draco?.decoderType || "wasm"
+);
 const dracoParser = new DracoParser(draco);
 try {
 return dracoParser.parseSync(arrayBuffer, options?.draco);
@@ -11284,18 +11257,18 @@ var __exports__ = (() => {
 bgr565: { basisFormat: 15, compressed: false },
 rgba4444: { basisFormat: 16, compressed: false }
 };
-async function parseBasis(data, options) {
-if (options.basis.containerFormat === "auto") {
+async function parseBasis(data, options = {}) {
+if (!options.basis?.containerFormat || options.basis.containerFormat === "auto") {
 if (isKTX(data)) {
-const fileConstructors = await loadBasisEncoderModule(options);
+const fileConstructors = await loadBasisEncoderModule(options?.core || {});
 return parseKTX2File(fileConstructors.KTX2File, data, options);
 }
-const { BasisFile } = await loadBasisTranscoderModule(options);
+const { BasisFile } = await loadBasisTranscoderModule(options?.core || {});
 return parseBasisFile(BasisFile, data, options);
 }
 switch (options.basis.module) {
 case "encoder":
-const fileConstructors = await loadBasisEncoderModule(options);
+const fileConstructors = await loadBasisEncoderModule(options?.core || {});
 switch (options.basis.containerFormat) {
 case "ktx2":
 return parseKTX2File(fileConstructors.KTX2File, data, options);
@@ -11305,7 +11278,7 @@ var __exports__ = (() => {
 }
 case "transcoder":
 default:
-const { BasisFile } = await loadBasisTranscoderModule(options);
+const { BasisFile } = await loadBasisTranscoderModule(options.core || {});
 return parseBasisFile(BasisFile, data, options);
 }
 }
@@ -11408,7 +11381,7 @@ var __exports__ = (() => {
 };
 }
 function getBasisOptions(options, hasAlpha) {
-let format = options
+let format = options.basis?.format;
 if (format === "auto") {
 format = selectSupportedBasisFormat();
 }
@@ -11792,13 +11765,13 @@ var __exports__ = (() => {
 var CompressedTextureLoader = {
 ...CompressedTextureWorkerLoader,
 parse: async (arrayBuffer, options) => {
+options = { ...options };
 if (options?.["compressed-texture"]?.useBasis) {
 options.basis = {
 format: {
 alpha: "BC3",
 noAlpha: "BC1"
 },
-// @ts-expect-error TODO not allowed to modify inputs
 ...options.basis,
 containerFormat: "ktx2",
 module: "encoder"
@@ -12949,7 +12922,8 @@ var __exports__ = (() => {
 let result = null;
 const geometryDefinition = this.tileset.geometryDefinitions[meshGeometryData.definition];
 let geometryIndex = -1;
-
+const i3sOptions = this.options.i3s;
+if (i3sOptions && typeof i3sOptions === "object" && i3sOptions.useDracoGeometry) {
 geometryIndex = geometryDefinition.geometryBuffers.findIndex(
 (buffer) => buffer.compressedAttributes && buffer.compressedAttributes.encoding === "draco"
 );
@@ -13048,7 +13022,8 @@ var __exports__ = (() => {
 */
 getSupportedTextureFormats() {
 const formats2 = [];
-
+const i3sOptions = this.options.i3s;
+if (!i3sOptions || i3sOptions.useCompressedTextures) {
 const supportedCompressedFormats = getSupportedGPUTextureFormats();
 if (supportedCompressedFormats.has("etc2")) {
 formats2.push("ktx-etc2");
@@ -13135,13 +13110,12 @@ var __exports__ = (() => {
 nodePagesTile = new I3SNodePagesTiles(tileset, url, options);
 root = await nodePagesTile.formTileFromNodePages(0);
 } else {
-const parseOptions = options.i3s;
+const parseOptions = options.i3s && typeof options.i3s === "object" ? options.i3s : {};
 const rootNodeUrl = getUrlWithToken(`${url}/nodes/root`, parseOptions.token);
 root = await (0, import_core17.load)(rootNodeUrl, I3SLoader, {
 ...options,
 i3s: {
-
-...options.i3s,
+...parseOptions,
 loadContent: false,
 isTileHeader: true,
 isTileset: false
@@ -13181,13 +13155,13 @@ var __exports__ = (() => {
 extensions: ["bin"],
 options: {
 i3s: {
-token:
+token: void 0,
 isTileset: "auto",
 isTileHeader: "auto",
-tile:
-tileset:
-_tileOptions:
-_tilesetOptions:
+tile: void 0,
+tileset: void 0,
+_tileOptions: void 0,
+_tilesetOptions: void 0,
 useDracoGeometry: true,
 useCompressedTextures: true,
 decodeTextures: true,
@@ -13210,7 +13184,7 @@ var __exports__ = (() => {
 isTileset = options.i3s.isTileset;
 }
 let isTileHeader;
-if (options.isTileHeader === "auto") {
+if (options.i3s.isTileHeader === "auto") {
 isTileHeader = TILE_HEADER_REGEX.test(urlWithoutParams);
 } else {
 isTileHeader = options.i3s.isTileHeader;
@@ -13246,22 +13220,109 @@ var __exports__ = (() => {
 return null;
 }

+// ../zip/src/parse-zip/readable-file-utils.ts
+function toBigInt(value) {
+return typeof value === "bigint" ? value : BigInt(value);
+}
+function toNumber(value) {
+const numberValue = Number(value);
+if (!Number.isFinite(numberValue)) {
+throw new Error("Offset is out of bounds");
+}
+return numberValue;
+}
+function normalizeOffset(offset, size) {
+if (offset < 0) {
+return Math.max(size + offset, 0);
+}
+return Math.min(offset, size);
+}
+async function readRange(file, start, end) {
+const startOffset = toBigInt(start);
+const endOffset = toBigInt(end);
+const length4 = endOffset - startOffset;
+if (length4 < 0) {
+throw new Error("Invalid range requested");
+}
+return await file.read(startOffset, toNumber(length4));
+}
+async function readDataView(file, start, end) {
+const arrayBuffer = await readRange(file, start, end);
+return new DataView(arrayBuffer);
+}
+async function readUint16(file, offset) {
+const dataView = await readDataView(file, offset, toBigInt(offset) + 2n);
+return dataView.getUint16(0, true);
+}
+async function readUint32(file, offset) {
+const dataView = await readDataView(file, offset, toBigInt(offset) + 4n);
+return dataView.getUint32(0, true);
+}
+async function readBigUint64(file, offset) {
+const dataView = await readDataView(file, offset, toBigInt(offset) + 8n);
+return dataView.getBigUint64(0, true);
+}
+async function getReadableFileSize(file) {
+if (file.bigsize > 0n) {
+return file.bigsize;
+}
+if (file.size > 0) {
+return BigInt(file.size);
+}
+if (file.stat) {
+const stats = await file.stat();
+if (stats?.bigsize !== void 0) {
+return stats.bigsize;
+}
+if (stats?.size !== void 0) {
+return BigInt(stats.size);
+}
+}
+return 0n;
+}
+var DataViewReadableFile = class {
+handle;
+size;
+bigsize;
+url;
+constructor(dataView, url = "") {
+this.handle = dataView;
+this.size = dataView.byteLength;
+this.bigsize = BigInt(dataView.byteLength);
+this.url = url;
+}
+async close() {
+}
+async stat() {
+return { size: this.size, bigsize: this.bigsize, isDirectory: false };
+}
+async read(start = 0, length4) {
+const offset = toNumber(start);
+const end = length4 ? offset + length4 : this.size;
+const normalizedStart = normalizeOffset(offset, this.size);
+const normalizedEnd = normalizeOffset(end, this.size);
+const clampedEnd = Math.max(normalizedEnd, normalizedStart);
+const lengthToRead = clampedEnd - normalizedStart;
+if (lengthToRead <= 0) {
+return new ArrayBuffer(0);
+}
+return copyToArrayBuffer2(this.handle.buffer, normalizedStart, lengthToRead);
+}
+};
+
 // ../zip/src/parse-zip/search-from-the-end.ts
 var buffLength = 1024;
 var searchFromTheEnd = async (file, target) => {
-const
-
-
-await file.getUint8(file.length - 3n),
-void 0
-];
+const fileLength = await getReadableFileSize(file);
+const lastBytes = new Uint8Array(await readRange(file, fileLength - 3n, fileLength + 1n));
+const searchWindow = [lastBytes[3], lastBytes[2], lastBytes[1], void 0];
 let targetOffset = -1;
-let point =
+let point = fileLength - 4n;
 do {
 const prevPoint = point;
 point -= BigInt(buffLength);
 point = point >= 0n ? point : 0n;
-const buff = new Uint8Array(await file
+const buff = new Uint8Array(await readRange(file, point, prevPoint));
 for (let i = buff.length - 1; i > -1; i--) {
 searchWindow[3] = searchWindow[2];
 searchWindow[2] = searchWindow[1];
@@ -13289,23 +13350,24 @@ var __exports__ = (() => {
 var ZIP64_CD_START_OFFSET_OFFSET = 48n;
 var parseEoCDRecord = async (file) => {
 const zipEoCDOffset = await searchFromTheEnd(file, eoCDSignature);
-let cdRecordsNumber = BigInt(await file
-let cdByteSize = BigInt(await file
-let cdStartOffset = BigInt(await file
+let cdRecordsNumber = BigInt(await readUint16(file, zipEoCDOffset + CD_RECORDS_NUMBER_OFFSET));
+let cdByteSize = BigInt(await readUint32(file, zipEoCDOffset + CD_CD_BYTE_SIZE_OFFSET));
+let cdStartOffset = BigInt(await readUint32(file, zipEoCDOffset + CD_START_OFFSET_OFFSET));
 let zip64EoCDLocatorOffset = zipEoCDOffset - 20n;
 let zip64EoCDOffset = 0n;
-const magicBytes = await file
-if (compareArrayBuffers(magicBytes, zip64EoCDLocatorSignature)) {
-zip64EoCDOffset = await
+const magicBytes = await readRange(file, zip64EoCDLocatorOffset, zip64EoCDLocatorOffset + 4n);
+if (compareArrayBuffers(magicBytes, zip64EoCDLocatorSignature.buffer)) {
+zip64EoCDOffset = await readBigUint64(
+file,
 zip64EoCDLocatorOffset + ZIP64_EOCD_START_OFFSET_OFFSET
 );
-const endOfCDMagicBytes = await file
+const endOfCDMagicBytes = await readRange(file, zip64EoCDOffset, zip64EoCDOffset + 4n);
 if (!compareArrayBuffers(endOfCDMagicBytes, zip64EoCDSignature.buffer)) {
 throw new Error("zip64 EoCD not found");
 }
-cdRecordsNumber = await file
-cdByteSize = await file
-cdStartOffset = await file
+cdRecordsNumber = await readBigUint64(file, zip64EoCDOffset + ZIP64_CD_RECORDS_NUMBER_OFFSET);
+cdByteSize = await readBigUint64(file, zip64EoCDOffset + ZIP64_CD_CD_BYTE_SIZE_OFFSET);
+cdStartOffset = await readBigUint64(file, zip64EoCDOffset + ZIP64_CD_START_OFFSET_OFFSET);
 } else {
 zip64EoCDLocatorOffset = 0n;
 }
@@ -13469,12 +13531,11 @@ var __exports__ = (() => {
 var CD_FILE_NAME_OFFSET = 46n;
 var signature = new Uint8Array([80, 75, 1, 2]);
 var parseZipCDFileHeader = async (headerOffset, file) => {
-
+const fileLength = await getReadableFileSize(file);
+if (headerOffset >= fileLength) {
 return null;
 }
-const mainHeader =
-await file.slice(headerOffset, headerOffset + CD_FILE_NAME_OFFSET)
-);
+const mainHeader = await readDataView(file, headerOffset, headerOffset + CD_FILE_NAME_OFFSET);
 const magicBytes = mainHeader.buffer.slice(0, 4);
 if (!compareArrayBuffers(magicBytes, signature.buffer)) {
 return null;
@@ -13484,7 +13545,8 @@ var __exports__ = (() => {
 const extraFieldLength = mainHeader.getUint16(CD_EXTRA_FIELD_LENGTH_OFFSET, true);
 const startDisk = BigInt(mainHeader.getUint16(CD_START_DISK_OFFSET, true));
 const fileNameLength = mainHeader.getUint16(CD_FILE_NAME_LENGTH_OFFSET, true);
-const additionalHeader = await
+const additionalHeader = await readRange(
+file,
 headerOffset + CD_FILE_NAME_OFFSET,
 headerOffset + CD_FILE_NAME_OFFSET + BigInt(fileNameLength + extraFieldLength)
 );
@@ -13514,8 +13576,8 @@ var __exports__ = (() => {
 };
 async function* makeZipCDHeaderIterator(fileProvider) {
 const { cdStartOffset, cdByteSize } = await parseEoCDRecord(fileProvider);
-const centralDirectory = new
-new DataView(await fileProvider
+const centralDirectory = new DataViewReadableFile(
+new DataView(await readRange(fileProvider, cdStartOffset, cdStartOffset + cdByteSize))
 );
 let cdHeader = await parseZipCDFileHeader(0n, centralDirectory);
 while (cdHeader) {
@@ -13677,14 +13739,15 @@ var __exports__ = (() => {
 var FILE_NAME_OFFSET = 30n;
 var signature2 = new Uint8Array([80, 75, 3, 4]);
 var parseZipLocalFileHeader = async (headerOffset, file) => {
-const mainHeader =
+const mainHeader = await readDataView(file, headerOffset, headerOffset + FILE_NAME_OFFSET);
 const magicBytes = mainHeader.buffer.slice(0, 4);
-if (!compareArrayBuffers(magicBytes, signature2)) {
+if (!compareArrayBuffers(magicBytes, signature2.buffer)) {
 return null;
 }
 const fileNameLength = mainHeader.getUint16(FILE_NAME_LENGTH_OFFSET, true);
 const extraFieldLength = mainHeader.getUint16(EXTRA_FIELD_LENGTH_OFFSET, true);
-const additionalHeader = await
+const additionalHeader = await readRange(
+file,
 headerOffset + FILE_NAME_OFFSET,
 headerOffset + FILE_NAME_OFFSET + BigInt(fileNameLength + extraFieldLength)
 );
@@ -13883,16 +13946,16 @@ var __exports__ = (() => {
 }

 // ../crypto/src/lib/algorithms/md5-wasm.ts
-var
-var
+var BASE64_LOOKUP = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=";
+var wasmB64Bytes = decodeBase64Safely(
"AGFzbQEAAAABDANgAX8AYAAAYAABfwIeAgdpbXBvcnRzA2xvZwAAB2ltcG9ydHMDbWVtAgABAzIxAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAAAAAAAAAAAAAAAAAAAAAgICAgIAAAAAAAaYARt/AUGBxpS6Bgt/AUGJ17b+fgt/AUH+uevFeQt/AUH2qMmBAQt/AUEAC38BQQALfwFBAAt/AUEAC38BQQALfwFBAAt/AUEAC38BQQALfwFBAAt/AUEAC38BQQALfwFBAAt/AUEAC38BQQALfwFBAAt/AUEAC38BQQALfwFBAAt/AUEAC38BQQALfwFBAAt/AUEAC38BQQALB7oCJQhvbmVGdWxsQQAYCG9uZUZ1bGxCABkIb25lRnVsbEMAGghvbmVGdWxsRAAbBWxvb3BzAAEEbG9vcAACBXByaW1lAAMFbG9vcEEABAZsb29wQTEABQZsb29wQTIABgZsb29wQTMABwZsb29wQTQACAVsb29wQgAJBmxvb3BCMQAKBmxvb3BCMgALBmxvb3BCMwAMBmxvb3BCNAANBWxvb3BDAA4GbG9vcEMxAA8GbG9vcEMyABAGbG9vcEMzABEGbG9vcEM0ABIFbG9vcEQAEwZsb29wRDEAFAZsb29wRDIAFQZsb29wRDMAFgZsb29wRDQAFwRnZXRBACgEZ2V0QgApBGdldEMAKgRnZXREACsEZ2V0WAAsBHNldEEALQRzZXRCAC4Ec2V0QwAvBHNldEQAMARzZXRYADEKzA0xWwEBf0EAJAggAEEGdCEBAkADQCMIIAFGDQEjACQEIwEkBSMCJAYjAyQHEAIjBCMAaiQAIwUjAWokASMGIwJqJAIjByMDaiQDIwhBwABqJAgMAAsLIwgjGmokGgsTACMIIxpqJAkQAxAEEAkQDhATC6IBAEEAIwlqKAIAJApBBCMJaigCACQLQQgjCWooAgAkDEEMIwlqKAIAJA1BECMJaigCACQOQRQjCWooAgAkD0EYIwlqKAIAJBBBHCMJaigCACQRQSAjCWooAgAkEkEkIwlqKAIAJBNBKCMJaigCACQUQSwjCWooAgAkFUEwIwlqKAIAJBZBNCMJaigCACQXQTgjCWooAgAkGEE8IwlqKAIAJBkLCgAQBRAGEAcQCAsuAEH4yKq7fSMKahAYQdbunsZ+IwtqEBtB2+GBoQIjDGoQGkHunfeNfCMNahAZCy0AQa+f8Kt/Iw5qEBhBqoyfvAQjD2oQG0GTjMHBeiMQahAaQYGqmmojEWoQGQssAEHYsYLMBiMSahAYQa/vk9p4IxNqEBtBsbd9IxRqEBpBvq/zyngjFWoQGQstAEGiosDcBiMWahAYQZPj4WwjF2oQG0GOh+WzeiMYahAaQaGQ0M0EIxlqEBkLCgAQChALEAwQDQsuAEHiyviwfyMLahAcQcDmgoJ8IxBqEB9B0bT5sgIjFWoQHkGqj9vNfiMKahAdCy0AQd2gvLF9Iw9qEBxB06iQEiMUahAfQYHNh8V9IxlqEB5ByPfPvn4jDmoQHQsuAEHmm4ePAiMTahAcQdaP3Jl8IxhqEB9Bh5vUpn8jDWoQHkHtqeiqBCMSahAdCy0AQYXSj896IxdqEBxB+Me+ZyMMahAfQdmFvLsGIxFqEB5Bipmp6XgjFmoQHQsKABAPEBAQERASCysAQcLyaCMPahAgQYHtx7t4IxJqECNBosL17AYjFWoQIkGM8JRvIxhqECELLgBBxNT7pXojC2oQIEGpn/veBCMOahAjQeCW7bV/IxFqECJB8Pj+9XsjFGoQIQstAEHG/e3EAiMXahAgQfrPhNV+IwpqECNBheG8p30jDWoQIkGFuqAkIxBqECELLgBBuaDTzn0jE2oQIEHls+62fiMWahAjQfj5if0BIxlqECJB5ayxpXwjDGoQIQsKABAUEBUQFhAXCy0AQcTEpKF/IwpqECRBl/+rmQQjEWoQJ0Gnx9DceiMYahAmQbnAzmQjD2oQJQstAEHDs+2qBiMWahAkQZKZs/h4Iw1qECdB/ei/fyMUahAmQdG7kax4IwtqECULLQBBz/yh/QYjEmoQJEHgzbNxIxlqECdBlIaFmHojEGoQJkGho6DwBCMXahAlCy4AQYL9zbp/Iw5qECRBteTr6XsjFWoQJ0G7pd/WAiMMahAmQZGnm9x+IxNqECULKAEBf0F/IwFzIwNxIwEjAnFyIwBqIABqIgFBB3QgAUEZdnIjAWokAAsoAQF/QX8jAnMjAHEjAiMDcXIjAWogAGoiAUEWdCABQQp2ciMCaiQBCygBAX9BfyMDcyMBcSMDIwBxciMCaiAAaiIBQRF0IAFBD3ZyIwNqJAILKAEBf0F/IwBzIwJxIwAjAXFyIwNqIABqIgFBDHQgAUEUdnIjAGokAwsoAQF/IwJBfyMDc3EjASMDcXIjAGogAGoiAUEFdCABQRt2ciMBaiQACygBAX8jA0F/IwBzcSMCIwBxciMBaiAAaiIBQRR0IAFBDHZyIwJqJAELKAEBfyMAQX8jAXNxIwMjAXFyIwJqIABqIgFBDnQgAUESdnIjA2okAgsoAQF/IwFBfyMCc3EjACMCcXIjA2ogAGoiAUEJdCABQRd2ciMAaiQDCyIBAX8jASMCcyMDcyMAaiAAaiIBQQR0IAFBHHZyIwFqJAALIgEBfyMCIwNzIwBzIwFqIABqIgFBF3QgAUEJdnIjAmokAQsiAQF/IwMjAHMjAXMjAmogAGoiAUEQdCABQRB2ciMDaiQCCyIBAX8jACMBcyMCcyMDaiAAaiIBQQt0IAFBFXZyIwBqJAMLJQEBf0F/IwNzIwFyIwJzIwBqIABqIgFBBnQgAUEadnIjAWokAAslAQF/QX8jAHMjAnIjA3MjAWogAGoiAUEVdCABQQt2ciMCaiQBCyUBAX9BfyMBcyMDciMAcyMCaiAAaiIBQQ90IAFBEXZyIwNqJAILJQEBf0F/IwJzIwByIwFzIwNqIABqIgFBCnQgAUEWdnIjAGokAwsEACMACwQAIwELBAAjAgsEACMDCwQAIxoLBgAgACQACwYAIAAkAQsGACAAJAILBgAgACQDCwYAIAAkGgsA6gQEbmFtZQGSAzIAA2xvZwEFbG9vcHMCBGxvb3ADBXByaW1lBAVsb29wQQUGbG9vcEExBgZsb29wQTIHBmxvb3BBMwgGbG9vcEE0CQVsb29wQgoGbG9vcEIxCwZsb29wQjIMBmxvb3BCMw0GbG9vcEI0DgVsb29wQw8GbG9vcEMxEAZsb29wQzIRBmxvb3BDMxIGbG9vcEM0EwVsb29wRBQGbG9vcEQxFQZsb29wRDIWBmxvb3BEMxcGbG9vcEQ0GAhvbmVGdWxsQRkIb25lRnVsbEIaCG9uZUZ1bGxDGwhvbmVGdWxsRBwIdHdvRnVsbEEdCHR3b0Z1bGxCHgh0d29GdWxsQx8IdHdvRnVsbEQgCHRyZUZ1bGxBIQh0cmVGdWxsQiIIdHJlRnVsbEMjCHRyZUZ1bGxEJAhxdWFGdWxsQSUIcXVhRnVsbEImCHF1YUZ1bGxDJw
hxdWFGdWxsRCgEZ2V0QSkEZ2V0QioEZ2V0QysEZ2V0RCwEZ2V0WC0Ec2V0QS4Ec2V0Qi8Ec2V0QzAEc2V0RDEEc2V0WALNATIAAQAAAQIAAAEIbnVtbG9vcHMCAAMABAAFAAYABwAIAAkACgALAAwADQAOAA8AEAARABIAEwAUABUAFgAXABgCAAABAW4ZAgAAAQFuGgIAAAEBbhsCAAABAW4cAgAAAQFuHQIAAAEBbh4CAAABAW4fAgAAAQFuIAIAAAEBbiECAAABAW4iAgAAAQFuIwIAAAEBbiQCAAABAW4lAgAAAQFuJgIAAAEBbicCAAABAW4oACkAKgArACwALQEAAC4BAAAvAQAAMAEAADEBAAA="
 );
-var wasm = WebAssembly
+var wasm = typeof WebAssembly !== "undefined" && wasmB64Bytes ? wasmB64Bytes.buffer : false;
 var crypt = makeCrypt();
 var biteSize = 240 * 16 * 16;
 var bounder = Math.floor(biteSize * 16 * 1.066666667);
 var upperLimit = 268435456 - 65536;
-var parmTypeErrStr = "Parameter must be
+var parmTypeErrStr = "Parameter must be ArrayBuffer, ArrayBufferView or string";
 var tooBigErrStr = "Parameter exceeds max size of 255.9 Mbytes";
 if (!wasm) {
 console.log(
@@ -13911,20 +13974,9 @@ var __exports__ = (() => {
 catchFun = fun;
 return returnObj;
 };
-
-
-
-buff = data;
-} else {
-if (data.constructor === Uint8Array || data.constructor === ArrayBuffer) {
-buff = data.constructor === ArrayBuffer ? new Uint8Array(data) : data;
-} else {
-getCatch(new TypeError(parmTypeErrStr));
-}
-}
-} else {
-getCatch(new TypeError(parmTypeErrStr));
-}
+buff = normalizeInput(data);
+if (!buff) {
+getCatch(new TypeError(parmTypeErrStr));
 }
 if (buff) {
 len2 = buff.length;
@@ -14240,21 +14292,62 @@ var __exports__ = (() => {
 return result;
 };
 }
-function
-
-
-
-
-while (l > i++) {
-buffView[i] = str3.charCodeAt(i);
+function decodeBase64Safely(str3) {
+try {
+return decodeBase64ToUint8Array(str3);
+} catch (error) {
+return null;
 }
-return buffView;
 }
-function
-
+function decodeBase64ToUint8Array(base64) {
+var sanitized = base64.replace(/-/g, "+").replace(/_/g, "/");
+if (typeof atob === "function") {
+return binaryStringToUint8Array(atob(sanitized));
+}
+return decodeBase64WithoutAtob(sanitized);
 }
-function
-
+function decodeBase64WithoutAtob(base64) {
+var paddingLength = base64.endsWith("==") ? 2 : base64.endsWith("=") ? 1 : 0, byteLength = base64.length * 3 / 4 - paddingLength, bytes = new Uint8Array(byteLength), byteIndex = 0, i = 0, enc1, enc2, enc3, enc4;
+while (i < base64.length) {
+enc1 = BASE64_LOOKUP.indexOf(base64.charAt(i++));
+enc2 = BASE64_LOOKUP.indexOf(base64.charAt(i++));
+enc3 = BASE64_LOOKUP.indexOf(base64.charAt(i++));
+enc4 = BASE64_LOOKUP.indexOf(base64.charAt(i++));
+if (enc1 < 0 || enc2 < 0 || enc3 < 0 || enc4 < 0) {
+throw new TypeError("Invalid base64 input");
+}
+bytes[byteIndex++] = enc1 << 2 | enc2 >> 4;
+if (enc3 < 64 && byteIndex < byteLength) {
+bytes[byteIndex++] = (enc2 & 15) << 4 | enc3 >> 2;
+}
+if (enc4 < 64 && byteIndex < byteLength) {
+bytes[byteIndex++] = (enc3 & 3) << 6 | enc4;
+}
+}
+return bytes;
+}
+function binaryStringToUint8Array(str3) {
+var length4 = str3.length, view = new Uint8Array(length4), i = 0;
+while (i < length4) {
+view[i] = str3.charCodeAt(i);
+i++;
+}
+return view;
+}
+function normalizeInput(data) {
+if (data instanceof Uint8Array) {
+return data;
+}
+if (typeof ArrayBuffer !== "undefined" && data instanceof ArrayBuffer) {
+return new Uint8Array(data);
+}
+if (data && typeof data === "object" && ArrayBuffer.isView(data)) {
+return new Uint8Array(data.buffer, data.byteOffset, data.byteLength);
+}
+if (typeof data === "string") {
+return new TextEncoder().encode(data);
+}
+return null;
 }
 function makeCrypt() {
 var base64map = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
@@ -14401,7 +14494,7 @@ var __exports__ = (() => {
 const pakoOptions = this.options?.deflate || {};
 const inputArray = new Uint8Array(input);
 const deflate = this.options?.raw ? import_pako.default.deflateRaw : import_pako.default.deflate;
-return deflate(inputArray, pakoOptions).buffer;
+return toArrayBuffer2(deflate(inputArray, pakoOptions).buffer);
 }
 decompressSync(input) {
 if (!isBrowser && this.options.deflate?.useZlib) {
@@ -14411,7 +14504,7 @@ var __exports__ = (() => {
 const pakoOptions = this.options?.deflate || {};
 const inputArray = new Uint8Array(input);
 const inflate = this.options?.raw ? import_pako.default.inflateRaw : import_pako.default.inflate;
-return inflate(inputArray, pakoOptions).buffer;
+return toArrayBuffer2(inflate(inputArray, pakoOptions).buffer);
 }
 async *compressBatches(asyncIterator) {
 const pakoOptions = this.options?.deflate || {};
@@ -14504,34 +14597,35 @@ var __exports__ = (() => {
 }
 };
 var ZipFileSystem = class {
-/**
-
+/** File instance */
+file = null;
 fileName;
 archive = null;
 /**
 * Constructor
-* @param file - instance of
+* @param file - instance of ReadableFile or file path string
 */
 constructor(file) {
 if (typeof file === "string") {
 this.fileName = file;
-if (
-
-} else {
-throw new Error("Cannot open file for random access in a WEB browser");
+if (isBrowser) {
+throw new Error("ZipFileSystem cannot open file paths in browser environments");
 }
+this.file = new NodeFileFacade(file);
+} else if (file instanceof Blob || file instanceof ArrayBuffer) {
+this.file = new BlobFile(file);
 } else if (file instanceof IndexedArchive) {
-this.
+this.file = file.file;
 this.archive = file;
 this.fileName = file.fileName;
-} else
-this.
+} else {
+this.file = file;
 }
 }
 /** Clean up resources */
 async destroy() {
-if (this.
-await this.
+if (this.file) {
+await this.file.close();
 }
 }
 /**
@@ -14539,11 +14633,11 @@ var __exports__ = (() => {
 * @returns array of file names
 */
 async readdir() {
-if (!this.
+if (!this.file) {
 throw new Error("No data detected in the zip archive");
 }
 const fileNames = [];
-const zipCDIterator = makeZipCDHeaderIterator(this.
+const zipCDIterator = makeZipCDHeaderIterator(this.file);
 for await (const cdHeader of zipCDIterator) {
 fileNames.push(cdHeader.fileName);
 }
@@ -14571,13 +14665,13 @@ var __exports__ = (() => {
 if (this.archive) {
 uncompressedFile = await this.archive.getFile(filename, "http");
 } else {
-if (!this.
+if (!this.file) {
 throw new Error("No data detected in the zip archive");
 }
 const cdFileHeader = await this.getCDFileHeader(filename);
 const localFileHeader = await parseZipLocalFileHeader(
 cdFileHeader.localHeaderOffset,
-this.
+this.file
 );
 if (!localFileHeader) {
 throw new Error("Local file header has not been found in the zip archive`");
@@ -14586,7 +14680,8 @@ var __exports__ = (() => {
 if (!compressionHandler) {
 throw Error("Only Deflation compression is supported");
 }
-const compressedFile = await
+const compressedFile = await readRange(
+this.file,
 localFileHeader.fileDataOffset,
 localFileHeader.fileDataOffset + localFileHeader.compressedSize
 );
@@ -14604,10 +14699,10 @@ var __exports__ = (() => {
 * @returns central directory file header
 */
 async getCDFileHeader(filename) {
-if (!this.
+if (!this.file) {
 throw new Error("No data detected in the zip archive");
 }
-const zipCDIterator = makeZipCDHeaderIterator(this.
+const zipCDIterator = makeZipCDHeaderIterator(this.file);
 let result = null;
 for await (const cdHeader of zipCDIterator) {
 if (cdHeader.fileName === filename) {
@@ -14624,16 +14719,16 @@ var __exports__ = (() => {

 // ../zip/src/filesystems/IndexedArchive.ts
 var IndexedArchive = class {
-
+file;
 fileName;
 /**
 * Constructor
-* @param fileProvider -
+* @param fileProvider - readable file instance for random access
 * @param hashTable - pre-loaded hashTable. If presented, getFile will skip reading the hash file
 * @param fileName - name of the archive. It is used to add to an URL of a loader context
 */
-constructor(
-this.
+constructor(file, hashTable, fileName) {
+this.file = file;
 this.fileName = fileName;
 }
 /**
@@ -14642,7 +14737,7 @@ var __exports__ = (() => {
 * @returns
 */
 async getFileWithoutHash(filename) {
-const zipFS = new ZipFileSystem(this.
+const zipFS = new ZipFileSystem(this.file);
 const response = await zipFS.fetch(filename);
 return await response.arrayBuffer();
 }
@@ -14726,7 +14821,7 @@ var __exports__ = (() => {
 _md5Hash = new MD5Hash();
 /**
 * Constructor
-* @param fileProvider -
+* @param fileProvider - readable file handle for random access
 * @param hashTable - pre-loaded hashTable. If presented, getFile will skip reading the hash file
 * @param fileName - name of the archive. It is used to add to an URL of a loader context
 */
@@ -14802,11 +14897,12 @@ var __exports__ = (() => {
 if (offset === void 0) {
 return void 0;
 }
-const localFileHeader = await parseZipLocalFileHeader(offset, this.
+const localFileHeader = await parseZipLocalFileHeader(offset, this.file);
 if (!localFileHeader) {
 return void 0;
 }
-compressedFile = await
+compressedFile = await readRange(
+this.file,
 localFileHeader.fileDataOffset,
 localFileHeader.fileDataOffset + localFileHeader.compressedSize
 );
@@ -14840,7 +14936,8 @@ var __exports__ = (() => {
 throw new Error("corrupted SLPK");
 }
 const fileDataOffset = localFileHeader.fileDataOffset;
-const hashFile = await
+const hashFile = await readRange(
+fileProvider,
 fileDataOffset,
 fileDataOffset + localFileHeader.compressedSize
 );
@@ -14860,9 +14957,14 @@ var __exports__ = (() => {
 version: VERSION8,
 mimeTypes: ["application/octet-stream"],
 extensions: ["slpk"],
-options: {
+options: {
+slpk: {
+path: "",
+pathMode: void 0
+}
+},
 parse: async (data, options = {}) => {
-const archive = await parseSLPKArchive(new
+const archive = await parseSLPKArchive(new DataViewReadableFile(new DataView(data)));
 return archive.getFile(options.slpk?.path ?? "", options.slpk?.pathMode);
 }
 };
@@ -15257,8 +15359,10 @@ var __exports__ = (() => {
 const objectIdAttributeUrl = getUrlWithToken(attributeUrls[attributeIndex], token);
 const attributeType = getAttributeValueType(attributeStorageInfo[attributeIndex]);
 const objectIdAttributeData = await (0, import_core21.load)(objectIdAttributeUrl, I3SAttributeLoader, {
-
-
+i3s: {
+attributeName,
+attributeType
+}
 });
 return objectIdAttributeData;
 }