@loaders.gl/i3s 4.0.0-alpha.6 → 4.0.0-alpha.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/dist.min.js +676 -113
- package/dist/es5/arcgis-webscene-loader.js +1 -1
- package/dist/es5/i3s-attribute-loader.js +1 -1
- package/dist/es5/i3s-building-scene-layer-loader.js +1 -1
- package/dist/es5/i3s-content-loader.js +2 -3
- package/dist/es5/i3s-content-loader.js.map +1 -1
- package/dist/es5/i3s-loader.js +1 -1
- package/dist/es5/i3s-node-page-loader.js +1 -1
- package/dist/es5/i3s-slpk-loader.js +20 -0
- package/dist/es5/i3s-slpk-loader.js.map +1 -0
- package/dist/es5/index.js +8 -1
- package/dist/es5/index.js.map +1 -1
- package/dist/es5/lib/parsers/parse-slpk/parse-slpk.js +94 -0
- package/dist/es5/lib/parsers/parse-slpk/parse-slpk.js.map +1 -0
- package/dist/es5/lib/parsers/parse-slpk/slpk-archieve.js +112 -0
- package/dist/es5/lib/parsers/parse-slpk/slpk-archieve.js.map +1 -0
- package/dist/es5/lib/parsers/parse-zip/cd-file-header.js +44 -0
- package/dist/es5/lib/parsers/parse-zip/cd-file-header.js.map +1 -0
- package/dist/es5/lib/parsers/parse-zip/local-file-header.js +26 -0
- package/dist/es5/lib/parsers/parse-zip/local-file-header.js.map +1 -0
- package/dist/es5/workers/{i3s-content-nodejs-worker.js → i3s-content-worker-node.js} +2 -2
- package/dist/es5/workers/i3s-content-worker-node.js.map +1 -0
- package/dist/esm/arcgis-webscene-loader.js +1 -1
- package/dist/esm/i3s-attribute-loader.js +1 -1
- package/dist/esm/i3s-building-scene-layer-loader.js +1 -1
- package/dist/esm/i3s-content-loader.js +2 -3
- package/dist/esm/i3s-content-loader.js.map +1 -1
- package/dist/esm/i3s-loader.js +1 -1
- package/dist/esm/i3s-node-page-loader.js +1 -1
- package/dist/esm/i3s-slpk-loader.js +13 -0
- package/dist/esm/i3s-slpk-loader.js.map +1 -0
- package/dist/esm/index.js +2 -1
- package/dist/esm/index.js.map +1 -1
- package/dist/esm/lib/parsers/parse-slpk/parse-slpk.js +37 -0
- package/dist/esm/lib/parsers/parse-slpk/parse-slpk.js.map +1 -0
- package/dist/esm/lib/parsers/parse-slpk/slpk-archieve.js +58 -0
- package/dist/esm/lib/parsers/parse-slpk/slpk-archieve.js.map +1 -0
- package/dist/esm/lib/parsers/parse-zip/cd-file-header.js +37 -0
- package/dist/esm/lib/parsers/parse-zip/cd-file-header.js.map +1 -0
- package/dist/esm/lib/parsers/parse-zip/local-file-header.js +19 -0
- package/dist/esm/lib/parsers/parse-zip/local-file-header.js.map +1 -0
- package/dist/esm/workers/{i3s-content-nodejs-worker.js → i3s-content-worker-node.js} +2 -2
- package/dist/esm/workers/i3s-content-worker-node.js.map +1 -0
- package/dist/i3s-content-loader.d.ts.map +1 -1
- package/dist/i3s-content-loader.js +1 -2
- package/dist/i3s-content-worker-node.js +197 -0
- package/dist/{i3s-content-nodejs-worker.js.map → i3s-content-worker-node.js.map} +3 -3
- package/dist/i3s-content-worker.js +77 -55
- package/dist/i3s-slpk-loader.d.ts +10 -0
- package/dist/i3s-slpk-loader.d.ts.map +1 -0
- package/dist/i3s-slpk-loader.js +20 -0
- package/dist/index.d.ts +2 -1
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +5 -3
- package/dist/lib/parsers/parse-slpk/parse-slpk.d.ts +4 -0
- package/dist/lib/parsers/parse-slpk/parse-slpk.d.ts.map +1 -0
- package/dist/lib/parsers/parse-slpk/parse-slpk.js +50 -0
- package/dist/lib/parsers/parse-slpk/slpk-archieve.d.ts +32 -0
- package/dist/lib/parsers/parse-slpk/slpk-archieve.d.ts.map +1 -0
- package/dist/lib/parsers/parse-slpk/slpk-archieve.js +79 -0
- package/dist/lib/parsers/parse-zip/cd-file-header.d.ts +38 -0
- package/dist/lib/parsers/parse-zip/cd-file-header.d.ts.map +1 -0
- package/dist/lib/parsers/parse-zip/cd-file-header.js +48 -0
- package/dist/lib/parsers/parse-zip/local-file-header.d.ts +30 -0
- package/dist/lib/parsers/parse-zip/local-file-header.d.ts.map +1 -0
- package/dist/lib/parsers/parse-zip/local-file-header.js +28 -0
- package/dist/workers/i3s-content-worker-node.d.ts +2 -0
- package/dist/workers/i3s-content-worker-node.d.ts.map +1 -0
- package/dist/workers/{i3s-content-nodejs-worker.js → i3s-content-worker-node.js} +2 -1
- package/package.json +15 -13
- package/src/i3s-content-loader.ts +1 -2
- package/src/i3s-slpk-loader.ts +25 -0
- package/src/index.ts +2 -2
- package/src/lib/parsers/parse-slpk/parse-slpk.ts +61 -0
- package/src/lib/parsers/parse-slpk/slpk-archieve.ts +108 -0
- package/src/lib/parsers/parse-zip/cd-file-header.ts +93 -0
- package/src/lib/parsers/parse-zip/local-file-header.ts +56 -0
- package/src/workers/{i3s-content-nodejs-worker.ts → i3s-content-worker-node.ts} +2 -1
- package/dist/es5/workers/i3s-content-nodejs-worker.js.map +0 -1
- package/dist/esm/workers/i3s-content-nodejs-worker.js.map +0 -1
- package/dist/i3s-content-nodejs-worker.js +0 -198
- package/dist/workers/i3s-content-nodejs-worker.d.ts +0 -2
- package/dist/workers/i3s-content-nodejs-worker.d.ts.map +0 -1
package/dist/esm/index.js.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"index.js","names":["COORDINATE_SYSTEM","I3SLoader","I3SContentLoader","I3SAttributeLoader","loadFeatureAttributes","
|
|
1
|
+
{"version":3,"file":"index.js","names":["COORDINATE_SYSTEM","I3SLoader","SLPKLoader","I3SContentLoader","I3SAttributeLoader","loadFeatureAttributes","I3SBuildingSceneLayerLoader","I3SNodePageLoader","ArcGisWebSceneLoader"],"sources":["../../src/index.ts"],"sourcesContent":["// loaders.gl, MIT license\n\nexport type {\n BoundingVolumes,\n Mbs,\n Obb,\n SceneLayer3D,\n AttributeStorageInfo,\n Field,\n ESRIField,\n PopupInfo,\n Node3DIndexDocument,\n LodSelection,\n NodeReference,\n Resource,\n MaxScreenThresholdSQ,\n NodeInPage,\n SharedResources,\n Attribute,\n Extent,\n FeatureAttribute,\n FieldInfo,\n I3SMaterialDefinition,\n TextureDefinitionInfo,\n MaterialDefinitionInfo,\n FullExtent,\n StatisticsInfo,\n StatsInfo,\n Histogram,\n ValueCount,\n BuildingSceneSublayer,\n DATA_TYPE,\n OperationalLayer\n} from './types';\n\nexport {COORDINATE_SYSTEM} from './lib/parsers/constants';\n\nexport {I3SLoader} from './i3s-loader';\nexport {SLPKLoader} from './i3s-slpk-loader';\nexport {I3SContentLoader} from './i3s-content-loader';\nexport {I3SAttributeLoader, loadFeatureAttributes} from './i3s-attribute-loader';\nexport {I3SBuildingSceneLayerLoader} from './i3s-building-scene-layer-loader';\nexport {I3SNodePageLoader} from './i3s-node-page-loader';\nexport {ArcGisWebSceneLoader} from './arcgis-webscene-loader';\n"],"mappings":"AAmCA,SAAQA,iBAAiB,QAAO,yBAAyB;AAEzD,SAAQC,SAAS,QAAO,cAAc;AACtC,SAAQC,UAAU,QAAO,mBAAmB;AAC5C,SAAQC,gBAAgB,QAAO,sBAAsB;AACrD,SAAQC,kBAAkB,EAAEC,qBAAqB,QAAO,wBAAwB;AAChF,SAAQC,2BAA2B,QAAO,mCAAmC;AAC7E,SAAQC,iBAAiB,QAAO,wBAAwB;AACxD,SAAQC,oBAAoB,QAAO,0BAA0B"}
|
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
import { parseZipCDFileHeader } from '../parse-zip/cd-file-header';
|
|
2
|
+
import { parseZipLocalFileHeader } from '../parse-zip/local-file-header';
|
|
3
|
+
import { SLPKArchive } from './slpk-archieve';
|
|
4
|
+
/**
 * Reads one byte from the provided DataView at the given view-relative offset.
 * @param offset - position to read, relative to the start of the view
 * @param buffer - DataView to read from
 * @returns the byte at the provided position
 */
const getByteAt = (offset, buffer) => {
  // DataView.getUint8 is already relative to the view's own byteOffset;
  // the previous `buffer.getUint8(buffer.byteOffset + offset)` double-counted
  // the offset for views that do not start at position 0 of the
  // underlying ArrayBuffer.
  return buffer.getUint8(offset);
};
|
|
7
|
+
/**
 * Parses an SLPK (Scene Layer Package) archive and returns the content of the
 * file requested via `options.path`.
 *
 * Scans the archive backwards for the last zip central-directory file header,
 * which in an SLPK describes the special hash index file
 * ('@specialIndexFileHASH128@'), then hands that index to SLPKArchive to
 * resolve the requested path.
 *
 * @param data - the whole SLPK archive as an ArrayBuffer
 * @param options - may contain `path`: the file path inside the archive
 * @returns buffer with the requested file content
 * @throws if the hash index file cannot be found in the archive
 */
export async function parseSLPK(data, options = {}) {
  const archive = new DataView(data);
  // 'PK\x01\x02' - central directory file header signature
  const cdFileHeaderSignature = [80, 75, 1, 2];

  // Sliding window over bytes [i, i+1, i+2, i+3]. Seed it with the archive's
  // last three bytes so the first iteration (i = byteLength - 4) sees the
  // final four bytes in file order. (The previous code seeded the window in
  // reverse order, so a signature located within the last three candidate
  // positions could never match.)
  const searchWindow = [
    getByteAt(archive.byteLength - 3, archive),
    getByteAt(archive.byteLength - 2, archive),
    getByteAt(archive.byteLength - 1, archive),
    undefined
  ];

  let hashCDOffset = 0;

  // looking for the last record in the central directory
  for (let i = archive.byteLength - 4; i > -1; i--) {
    searchWindow[3] = searchWindow[2];
    searchWindow[2] = searchWindow[1];
    searchWindow[1] = searchWindow[0];
    searchWindow[0] = getByteAt(i, archive);
    if (searchWindow.every((val, index) => val === cdFileHeaderSignature[index])) {
      hashCDOffset = i;
      break;
    }
  }

  // NOTE(review): if no signature is found, hashCDOffset stays 0 and the
  // fileName check below rejects the archive with 'No hash file in slpk'.
  const cdFileHeader = parseZipCDFileHeader(hashCDOffset, archive);

  const textDecoder = new TextDecoder();
  if (textDecoder.decode(cdFileHeader.fileName) !== '@specialIndexFileHASH128@') {
    throw new Error('No hash file in slpk');
  }

  const localFileHeader = parseZipLocalFileHeader(cdFileHeader.localHeaderOffset, archive);

  const fileDataOffset = localFileHeader.fileDataOffset;
  const hashFile = archive.buffer.slice(fileDataOffset, fileDataOffset + localFileHeader.compressedSize);

  if (!hashFile) {
    throw new Error('No hash file in slpk');
  }

  return await new SLPKArchive(data, hashFile).getFile(options.path ?? '');
}
|
|
37
|
+
//# sourceMappingURL=parse-slpk.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"parse-slpk.js","names":["parseZipCDFileHeader","parseZipLocalFileHeader","SLPKArchive","getByteAt","offset","buffer","getUint8","byteOffset","parseSLPK","data","_options$path","options","arguments","length","undefined","archive","DataView","cdFileHeaderSignature","searchWindow","byteLength","hashCDOffset","i","every","val","index","cdFileHeader","textDecoder","TextDecoder","decode","fileName","Error","localFileHeader","localHeaderOffset","fileDataOffset","hashFile","slice","compressedSize","getFile","path"],"sources":["../../../../../src/lib/parsers/parse-slpk/parse-slpk.ts"],"sourcesContent":["import type {SLPKLoaderOptions} from '../../../i3s-slpk-loader';\nimport {parseZipCDFileHeader} from '../parse-zip/cd-file-header';\nimport {parseZipLocalFileHeader} from '../parse-zip/local-file-header';\nimport {SLPKArchive} from './slpk-archieve';\n\n/**\n * Returns one byte from the provided buffer at the provided position\n * @param offset - position where to read\n * @param buffer - buffer to read\n * @returns one byte from the provided buffer at the provided position\n */\nconst getByteAt = (offset: number, buffer: DataView): number => {\n return buffer.getUint8(buffer.byteOffset + offset);\n};\n\nexport async function parseSLPK(data: ArrayBuffer, options: SLPKLoaderOptions = {}) {\n const archive = new DataView(data);\n const cdFileHeaderSignature = [80, 75, 1, 2];\n\n const searchWindow = [\n getByteAt(archive.byteLength - 1, archive),\n getByteAt(archive.byteLength - 2, archive),\n getByteAt(archive.byteLength - 3, archive),\n undefined\n ];\n\n let hashCDOffset = 0;\n\n // looking for the last record in the central directory\n for (let i = archive.byteLength - 4; i > -1; i--) {\n searchWindow[3] = searchWindow[2];\n searchWindow[2] = searchWindow[1];\n searchWindow[1] = searchWindow[0];\n searchWindow[0] = getByteAt(i, archive);\n if (searchWindow.every((val, index) => val === cdFileHeaderSignature[index])) {\n hashCDOffset = i;\n break;\n }\n 
}\n\n const cdFileHeader = parseZipCDFileHeader(hashCDOffset, archive);\n\n const textDecoder = new TextDecoder();\n if (textDecoder.decode(cdFileHeader.fileName) !== '@specialIndexFileHASH128@') {\n throw new Error('No hash file in slpk');\n }\n\n const localFileHeader = parseZipLocalFileHeader(cdFileHeader.localHeaderOffset, archive);\n\n const fileDataOffset = localFileHeader.fileDataOffset;\n const hashFile = archive.buffer.slice(\n fileDataOffset,\n fileDataOffset + localFileHeader.compressedSize\n );\n\n if (!hashFile) {\n throw new Error('No hash file in slpk');\n }\n\n return await new SLPKArchive(data, hashFile).getFile(options.path ?? '');\n}\n"],"mappings":"AACA,SAAQA,oBAAoB,QAAO,6BAA6B;AAChE,SAAQC,uBAAuB,QAAO,gCAAgC;AACtE,SAAQC,WAAW,QAAO,iBAAiB;AAQ3C,MAAMC,SAAS,GAAGA,CAACC,MAAc,EAAEC,MAAgB,KAAa;EAC9D,OAAOA,MAAM,CAACC,QAAQ,CAACD,MAAM,CAACE,UAAU,GAAGH,MAAM,CAAC;AACpD,CAAC;AAED,OAAO,eAAeI,SAASA,CAACC,IAAiB,EAAmC;EAAA,IAAAC,aAAA;EAAA,IAAjCC,OAA0B,GAAAC,SAAA,CAAAC,MAAA,QAAAD,SAAA,QAAAE,SAAA,GAAAF,SAAA,MAAG,CAAC,CAAC;EAChF,MAAMG,OAAO,GAAG,IAAIC,QAAQ,CAACP,IAAI,CAAC;EAClC,MAAMQ,qBAAqB,GAAG,CAAC,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC;EAE5C,MAAMC,YAAY,GAAG,CACnBf,SAAS,CAACY,OAAO,CAACI,UAAU,GAAG,CAAC,EAAEJ,OAAO,CAAC,EAC1CZ,SAAS,CAACY,OAAO,CAACI,UAAU,GAAG,CAAC,EAAEJ,OAAO,CAAC,EAC1CZ,SAAS,CAACY,OAAO,CAACI,UAAU,GAAG,CAAC,EAAEJ,OAAO,CAAC,EAC1CD,SAAS,CACV;EAED,IAAIM,YAAY,GAAG,CAAC;EAGpB,KAAK,IAAIC,CAAC,GAAGN,OAAO,CAACI,UAAU,GAAG,CAAC,EAAEE,CAAC,GAAG,CAAC,CAAC,EAAEA,CAAC,EAAE,EAAE;IAChDH,YAAY,CAAC,CAAC,CAAC,GAAGA,YAAY,CAAC,CAAC,CAAC;IACjCA,YAAY,CAAC,CAAC,CAAC,GAAGA,YAAY,CAAC,CAAC,CAAC;IACjCA,YAAY,CAAC,CAAC,CAAC,GAAGA,YAAY,CAAC,CAAC,CAAC;IACjCA,YAAY,CAAC,CAAC,CAAC,GAAGf,SAAS,CAACkB,CAAC,EAAEN,OAAO,CAAC;IACvC,IAAIG,YAAY,CAACI,KAAK,CAAC,CAACC,GAAG,EAAEC,KAAK,KAAKD,GAAG,KAAKN,qBAAqB,CAACO,KAAK,CAAC,CAAC,EAAE;MAC5EJ,YAAY,GAAGC,CAAC;MAChB;IACF;EACF;EAEA,MAAMI,YAAY,GAAGzB,oBAAoB,CAACoB,YAAY,EAAEL,OAAO,CAAC;EAEhE,MAAMW,WAAW,GAAG,IAAIC,WAAW,CAAC,CAAC;EACrC,IAAID,WAAW,CAACE,MAAM
,CAACH,YAAY,CAACI,QAAQ,CAAC,KAAK,2BAA2B,EAAE;IAC7E,MAAM,IAAIC,KAAK,CAAC,sBAAsB,CAAC;EACzC;EAEA,MAAMC,eAAe,GAAG9B,uBAAuB,CAACwB,YAAY,CAACO,iBAAiB,EAAEjB,OAAO,CAAC;EAExF,MAAMkB,cAAc,GAAGF,eAAe,CAACE,cAAc;EACrD,MAAMC,QAAQ,GAAGnB,OAAO,CAACV,MAAM,CAAC8B,KAAK,CACnCF,cAAc,EACdA,cAAc,GAAGF,eAAe,CAACK,cACnC,CAAC;EAED,IAAI,CAACF,QAAQ,EAAE;IACb,MAAM,IAAIJ,KAAK,CAAC,sBAAsB,CAAC;EACzC;EAEA,OAAO,MAAM,IAAI5B,WAAW,CAACO,IAAI,EAAEyB,QAAQ,CAAC,CAACG,OAAO,EAAA3B,aAAA,GAACC,OAAO,CAAC2B,IAAI,cAAA5B,aAAA,cAAAA,aAAA,GAAI,EAAE,CAAC;AAC1E"}
|
|
@@ -0,0 +1,58 @@
|
|
|
1
|
+
import _defineProperty from "@babel/runtime/helpers/esm/defineProperty";
|
|
2
|
+
import { processOnWorker } from '@loaders.gl/worker-utils';
|
|
3
|
+
import md5 from 'md5';
|
|
4
|
+
import { CompressionWorker } from '@loaders.gl/compression';
|
|
5
|
+
import { parseZipLocalFileHeader } from '../parse-zip/local-file-header';
|
|
6
|
+
/**
 * Handles an SLPK (zip-based) archive: looks files up through the archive's
 * md5 hash index and returns their (decompressed) content.
 */
export class SLPKArchive {
  /**
   * @param slpkArchiveBuffer - the whole archive as an ArrayBuffer
   * @param hashFile - content of the archive's hash index file
   */
  constructor(slpkArchiveBuffer, hashFile) {
    // Transpiled class-field declarations (Babel helper); values are
    // assigned right below.
    _defineProperty(this, "slpkArchive", void 0);
    _defineProperty(this, "hashArray", void 0);
    this.slpkArchive = new DataView(slpkArchiveBuffer);
    this.hashArray = this.parseHashFile(hashFile);
  }
  /**
   * Reads the hash file and returns it in ready-to-use form.
   * Each 24-byte record holds a 16-byte md5 hash of a file name followed by
   * an 8-byte little-endian offset into the archive (only the low 32 bits of
   * the offset are read here).
   * @param hashFile - buffer containing the hash file
   * @returns array of {hash, offset} entries
   */
  parseHashFile(hashFile) {
    // NOTE(review): the index arithmetic below assumes hashFileBuffer has
    // byteOffset 0 over its underlying ArrayBuffer (true for
    // Buffer.from(ArrayBuffer)) — confirm if this ever changes.
    const hashFileBuffer = Buffer.from(hashFile);
    const hashArray = [];
    for (let i = 0; i < hashFileBuffer.buffer.byteLength; i = i + 24) {
      // Bytes [i+16, i+24): little-endian file offset
      const offsetBuffer = new DataView(hashFileBuffer.buffer.slice(hashFileBuffer.byteOffset + i + 16, hashFileBuffer.byteOffset + i + 24));
      const offset = offsetBuffer.getUint32(offsetBuffer.byteOffset, true);
      hashArray.push({
        // Bytes [i, i+16): md5 hash of the file name
        hash: Buffer.from(hashFileBuffer.subarray(hashFileBuffer.byteOffset + i, hashFileBuffer.byteOffset + i + 16)),
        offset
      });
    }
    return hashArray;
  }
  /**
   * Returns the file stored under the given path inside the archive.
   * Tries the gzipped variant (`<path>.gz`) first and inflates it on a
   * worker; otherwise returns the raw stored bytes.
   * @param path - path inside the slpk
   * @param mode - only 'raw' is currently supported ('http' throws)
   * @returns promise resolving to a buffer with the ready-to-use file
   * @throws if mode is 'http' or the path is not present in the index
   */
  async getFile(path) {
    // Transpiled default parameter: mode = 'raw'
    let mode = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'raw';
    if (mode === 'http') {
      throw new Error('http mode is not supported');
    }
    const fileToDecompress = this.getFileBytes("".concat(path, ".gz"));
    if (fileToDecompress) {
      // NOTE(review): _workerType 'test' forces a local (non-CDN) worker —
      // confirm this is intended for production builds.
      const decompressedData = await processOnWorker(CompressionWorker, fileToDecompress, {
        compression: 'gzip',
        operation: 'decompress',
        _workerType: 'test',
        gzip: {}
      });
      return decompressedData;
    }
    const fileWithoutCompression = this.getFileBytes(path);
    if (fileWithoutCompression) {
      return Promise.resolve(Buffer.from(fileWithoutCompression));
    }
    throw new Error('No such file in the archieve');
  }
  /**
   * Looks up raw file bytes by path: hashes the path with md5, finds the
   * matching index entry, and slices the stored (possibly compressed) data
   * out of the archive.
   * @param path - path inside the archive
   * @returns ArrayBuffer with the raw stored bytes, or undefined if absent
   */
  getFileBytes(path) {
    const nameHash = Buffer.from(md5(path), 'hex');
    const fileInfo = this.hashArray.find(val => Buffer.compare(val.hash, nameHash) === 0);
    if (!fileInfo) {
      return undefined;
    }
    const localFileHeader = parseZipLocalFileHeader(this.slpkArchive.byteOffset + (fileInfo === null || fileInfo === void 0 ? void 0 : fileInfo.offset), this.slpkArchive);
    const compressedFile = this.slpkArchive.buffer.slice(localFileHeader.fileDataOffset, localFileHeader.fileDataOffset + localFileHeader.compressedSize);
    return compressedFile;
  }
}
|
|
58
|
+
//# sourceMappingURL=slpk-archieve.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"slpk-archieve.js","names":["processOnWorker","md5","CompressionWorker","parseZipLocalFileHeader","SLPKArchive","constructor","slpkArchiveBuffer","hashFile","_defineProperty","slpkArchive","DataView","hashArray","parseHashFile","hashFileBuffer","Buffer","from","i","buffer","byteLength","offsetBuffer","slice","byteOffset","offset","getUint32","push","hash","subarray","getFile","path","mode","arguments","length","undefined","Error","fileToDecompress","getFileBytes","concat","decompressedData","compression","operation","_workerType","gzip","fileWithoutCompression","Promise","resolve","nameHash","fileInfo","find","val","compare","localFileHeader","compressedFile","fileDataOffset","compressedSize"],"sources":["../../../../../src/lib/parsers/parse-slpk/slpk-archieve.ts"],"sourcesContent":["import {processOnWorker} from '@loaders.gl/worker-utils';\nimport md5 from 'md5';\nimport {CompressionWorker} from '@loaders.gl/compression';\nimport {parseZipLocalFileHeader} from '../parse-zip/local-file-header';\n\n/** Element of hash array */\ntype HashElement = {\n /**\n * File name hash\n */\n hash: Buffer;\n /**\n * File offset in the archive\n */\n offset: number;\n};\n\n/**\n * Class for handling information about slpk file\n */\nexport class SLPKArchive {\n slpkArchive: DataView;\n hashArray: {hash: Buffer; offset: number}[];\n constructor(slpkArchiveBuffer: ArrayBuffer, hashFile: ArrayBuffer) {\n this.slpkArchive = new DataView(slpkArchiveBuffer);\n this.hashArray = this.parseHashFile(hashFile);\n }\n\n /**\n * Reads hash file from buffer and returns it in ready-to-use form\n * @param hashFile - bufer containing hash file\n * @returns Array containing file info\n */\n private parseHashFile(hashFile: ArrayBuffer): HashElement[] {\n const hashFileBuffer = Buffer.from(hashFile);\n const hashArray: HashElement[] = [];\n for (let i = 0; i < hashFileBuffer.buffer.byteLength; i = i + 24) {\n const offsetBuffer = new DataView(\n hashFileBuffer.buffer.slice(\n 
hashFileBuffer.byteOffset + i + 16,\n hashFileBuffer.byteOffset + i + 24\n )\n );\n const offset = offsetBuffer.getUint32(offsetBuffer.byteOffset, true);\n hashArray.push({\n hash: Buffer.from(\n hashFileBuffer.subarray(hashFileBuffer.byteOffset + i, hashFileBuffer.byteOffset + i + 16)\n ),\n offset\n });\n }\n return hashArray;\n }\n\n /**\n * Returns file with the given path from slpk archive\n * @param path - path inside the slpk\n * @param mode - currently only raw mode supported\n * @returns buffer with ready to use file\n */\n async getFile(path: string, mode: 'http' | 'raw' = 'raw'): Promise<Buffer> {\n if (mode === 'http') {\n throw new Error('http mode is not supported');\n }\n\n const fileToDecompress = this.getFileBytes(`${path}.gz`);\n\n if (fileToDecompress) {\n const decompressedData = await processOnWorker(CompressionWorker, fileToDecompress, {\n compression: 'gzip',\n operation: 'decompress',\n _workerType: 'test',\n gzip: {}\n });\n return decompressedData;\n }\n const fileWithoutCompression = this.getFileBytes(path);\n if (fileWithoutCompression) {\n return Promise.resolve(Buffer.from(fileWithoutCompression));\n }\n throw new Error('No such file in the archieve');\n }\n\n /**\n * Trying to get raw file data by adress\n * @param path - path inside the archive\n * @returns buffer with the raw file data\n */\n private getFileBytes(path: string): ArrayBuffer | undefined {\n const nameHash = Buffer.from(md5(path), 'hex');\n const fileInfo = this.hashArray.find((val) => Buffer.compare(val.hash, nameHash) === 0);\n if (!fileInfo) {\n return undefined;\n }\n\n const localFileHeader = parseZipLocalFileHeader(\n this.slpkArchive.byteOffset + fileInfo?.offset,\n this.slpkArchive\n );\n\n const compressedFile = this.slpkArchive.buffer.slice(\n localFileHeader.fileDataOffset,\n localFileHeader.fileDataOffset + localFileHeader.compressedSize\n );\n\n return compressedFile;\n 
}\n}\n"],"mappings":";AAAA,SAAQA,eAAe,QAAO,0BAA0B;AACxD,OAAOC,GAAG,MAAM,KAAK;AACrB,SAAQC,iBAAiB,QAAO,yBAAyB;AACzD,SAAQC,uBAAuB,QAAO,gCAAgC;AAiBtE,OAAO,MAAMC,WAAW,CAAC;EAGvBC,WAAWA,CAACC,iBAA8B,EAAEC,QAAqB,EAAE;IAAAC,eAAA;IAAAA,eAAA;IACjE,IAAI,CAACC,WAAW,GAAG,IAAIC,QAAQ,CAACJ,iBAAiB,CAAC;IAClD,IAAI,CAACK,SAAS,GAAG,IAAI,CAACC,aAAa,CAACL,QAAQ,CAAC;EAC/C;EAOQK,aAAaA,CAACL,QAAqB,EAAiB;IAC1D,MAAMM,cAAc,GAAGC,MAAM,CAACC,IAAI,CAACR,QAAQ,CAAC;IAC5C,MAAMI,SAAwB,GAAG,EAAE;IACnC,KAAK,IAAIK,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGH,cAAc,CAACI,MAAM,CAACC,UAAU,EAAEF,CAAC,GAAGA,CAAC,GAAG,EAAE,EAAE;MAChE,MAAMG,YAAY,GAAG,IAAIT,QAAQ,CAC/BG,cAAc,CAACI,MAAM,CAACG,KAAK,CACzBP,cAAc,CAACQ,UAAU,GAAGL,CAAC,GAAG,EAAE,EAClCH,cAAc,CAACQ,UAAU,GAAGL,CAAC,GAAG,EAClC,CACF,CAAC;MACD,MAAMM,MAAM,GAAGH,YAAY,CAACI,SAAS,CAACJ,YAAY,CAACE,UAAU,EAAE,IAAI,CAAC;MACpEV,SAAS,CAACa,IAAI,CAAC;QACbC,IAAI,EAAEX,MAAM,CAACC,IAAI,CACfF,cAAc,CAACa,QAAQ,CAACb,cAAc,CAACQ,UAAU,GAAGL,CAAC,EAAEH,cAAc,CAACQ,UAAU,GAAGL,CAAC,GAAG,EAAE,CAC3F,CAAC;QACDM;MACF,CAAC,CAAC;IACJ;IACA,OAAOX,SAAS;EAClB;EAQA,MAAMgB,OAAOA,CAACC,IAAY,EAAiD;IAAA,IAA/CC,IAAoB,GAAAC,SAAA,CAAAC,MAAA,QAAAD,SAAA,QAAAE,SAAA,GAAAF,SAAA,MAAG,KAAK;IACtD,IAAID,IAAI,KAAK,MAAM,EAAE;MACnB,MAAM,IAAII,KAAK,CAAC,4BAA4B,CAAC;IAC/C;IAEA,MAAMC,gBAAgB,GAAG,IAAI,CAACC,YAAY,IAAAC,MAAA,CAAIR,IAAI,QAAK,CAAC;IAExD,IAAIM,gBAAgB,EAAE;MACpB,MAAMG,gBAAgB,GAAG,MAAMrC,eAAe,CAACE,iBAAiB,EAAEgC,gBAAgB,EAAE;QAClFI,WAAW,EAAE,MAAM;QACnBC,SAAS,EAAE,YAAY;QACvBC,WAAW,EAAE,MAAM;QACnBC,IAAI,EAAE,CAAC;MACT,CAAC,CAAC;MACF,OAAOJ,gBAAgB;IACzB;IACA,MAAMK,sBAAsB,GAAG,IAAI,CAACP,YAAY,CAACP,IAAI,CAAC;IACtD,IAAIc,sBAAsB,EAAE;MAC1B,OAAOC,OAAO,CAACC,OAAO,CAAC9B,MAAM,CAACC,IAAI,CAAC2B,sBAAsB,CAAC,CAAC;IAC7D;IACA,MAAM,IAAIT,KAAK,CAAC,8BAA8B,CAAC;EACjD;EAOQE,YAAYA,CAACP,IAAY,EAA2B;IAC1D,MAAMiB,QAAQ,GAAG/B,MAAM,CAACC,IAAI,CAACd,GAAG,CAAC2B,IAAI,CAAC,EAAE,KAAK,CAAC;IAC9C,MAAMkB,QAAQ,GAAG,IAAI,CAACnC,SAAS,CAACoC,IAAI,CAAEC,GAAG,IAAKlC,MAAM,CAACmC,OAAO,CAACD,GAAG,CAACvB,IAAI,EAAEoB,QAAQ,CAAC,KAAK,CAAC,CAAC;IACvF,IAAI,CAACC,Q
AAQ,EAAE;MACb,OAAOd,SAAS;IAClB;IAEA,MAAMkB,eAAe,GAAG/C,uBAAuB,CAC7C,IAAI,CAACM,WAAW,CAACY,UAAU,IAAGyB,QAAQ,aAARA,QAAQ,uBAARA,QAAQ,CAAExB,MAAM,GAC9C,IAAI,CAACb,WACP,CAAC;IAED,MAAM0C,cAAc,GAAG,IAAI,CAAC1C,WAAW,CAACQ,MAAM,CAACG,KAAK,CAClD8B,eAAe,CAACE,cAAc,EAC9BF,eAAe,CAACE,cAAc,GAAGF,eAAe,CAACG,cACnD,CAAC;IAED,OAAOF,cAAc;EACvB;AACF"}
|
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
/**
 * Parses a central directory file header of a zip file
 * (https://en.wikipedia.org/wiki/ZIP_(file_format)).
 * @param headerOffset - offset in the archive where the header starts
 * @param buffer - DataView containing the whole archive
 * @returns {compressedSize, uncompressedSize, fileNameLength, fileName,
 *           extraOffset, localHeaderOffset} read from the header
 */
export const parseZipCDFileHeader = (headerOffset, buffer) => {
  // Field positions relative to the header start
  const offsets = {
    CD_COMPRESSED_SIZE_OFFSET: 20,
    CD_UNCOMPRESSED_SIZE_OFFSET: 24,
    CD_FILE_NAME_LENGTH_OFFSET: 28,
    CD_EXTRA_FIELD_LENGTH_OFFSET: 30,
    CD_LOCAL_HEADER_OFFSET_OFFSET: 42,
    CD_FILE_NAME_OFFSET: 46
  };
  const compressedSize = buffer.getUint32(headerOffset + offsets.CD_COMPRESSED_SIZE_OFFSET, true);
  const uncompressedSize = buffer.getUint32(headerOffset + offsets.CD_UNCOMPRESSED_SIZE_OFFSET, true);
  const fileNameLength = buffer.getUint16(headerOffset + offsets.CD_FILE_NAME_LENGTH_OFFSET, true);
  const fileName = buffer.buffer.slice(headerOffset + offsets.CD_FILE_NAME_OFFSET, headerOffset + offsets.CD_FILE_NAME_OFFSET + fileNameLength);
  const extraOffset = headerOffset + offsets.CD_FILE_NAME_OFFSET + fileNameLength;
  const oldFormatOffset = buffer.getUint32(headerOffset + offsets.CD_LOCAL_HEADER_OFFSET_OFFSET, true);
  let fileDataOffset = oldFormatOffset;
  if (fileDataOffset === 0xffffffff) {
    // 0xffffffff signals that the real offset lives in the zip64 extended
    // information extra field. Skip its 4-byte header (id + size), plus the
    // 8-byte uncompressed/compressed size entries that precede the offset —
    // those entries are present only when the corresponding 32-bit field is
    // saturated. NOTE(review): assumes the zip64 field is the first extra
    // field in the record — confirm against producers of these archives.
    let offsetInZip64Data = 4;
    if (compressedSize === 0xffffffff) {
      offsetInZip64Data += 8;
    }
    if (uncompressedSize === 0xffffffff) {
      offsetInZip64Data += 8;
    }
    // The zip64 offset is 64-bit; the previous getUint32 read truncated
    // offsets in archives larger than 4 GB. Number() is exact up to 2^53.
    fileDataOffset = Number(buffer.getBigUint64(extraOffset + offsetInZip64Data, true));
  }
  const localHeaderOffset = fileDataOffset;
  return {
    compressedSize,
    uncompressedSize,
    fileNameLength,
    fileName,
    extraOffset,
    localHeaderOffset
  };
};
|
|
37
|
+
//# sourceMappingURL=cd-file-header.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"cd-file-header.js","names":["parseZipCDFileHeader","headerOffset","buffer","offsets","CD_COMPRESSED_SIZE_OFFSET","CD_UNCOMPRESSED_SIZE_OFFSET","CD_FILE_NAME_LENGTH_OFFSET","CD_EXTRA_FIELD_LENGTH_OFFSET","CD_LOCAL_HEADER_OFFSET_OFFSET","CD_FILE_NAME_OFFSET","compressedSize","getUint32","uncompressedSize","fileNameLength","getUint16","fileName","slice","extraOffset","oldFormatOffset","fileDataOffset","offsetInZip64Data","localHeaderOffset"],"sources":["../../../../../src/lib/parsers/parse-zip/cd-file-header.ts"],"sourcesContent":["/**\n * zip central directory file header info\n * according to https://en.wikipedia.org/wiki/ZIP_(file_format)\n */\nexport type ZipCDFileHeader = {\n /**\n * Compressed size\n */\n compressedSize: number;\n /**\n * Uncompressed size\n */\n uncompressedSize: number;\n /**\n * File name length\n */\n fileNameLength: number;\n /**\n * File name\n */\n fileName: ArrayBuffer;\n /**\n * Extra field offset\n */\n extraOffset: number;\n /**\n * Relative offset of local file header\n */\n localHeaderOffset: number;\n};\n\n/**\n * Parses central directory file header of zip file\n * @param headerOffset - offset in the archive where header starts\n * @param buffer - buffer containing whole array\n * @returns Info from the header\n */\nexport const parseZipCDFileHeader = (headerOffset: number, buffer: DataView): ZipCDFileHeader => {\n const offsets = {\n CD_COMPRESSED_SIZE_OFFSET: 20,\n CD_UNCOMPRESSED_SIZE_OFFSET: 24,\n CD_FILE_NAME_LENGTH_OFFSET: 28,\n CD_EXTRA_FIELD_LENGTH_OFFSET: 30,\n CD_LOCAL_HEADER_OFFSET_OFFSET: 42,\n CD_FILE_NAME_OFFSET: 46\n };\n\n const compressedSize = buffer.getUint32(headerOffset + offsets.CD_COMPRESSED_SIZE_OFFSET, true);\n\n const uncompressedSize = buffer.getUint32(\n headerOffset + offsets.CD_UNCOMPRESSED_SIZE_OFFSET,\n true\n );\n\n const fileNameLength = buffer.getUint16(headerOffset + offsets.CD_FILE_NAME_LENGTH_OFFSET, true);\n\n const fileName = buffer.buffer.slice(\n headerOffset + 
offsets.CD_FILE_NAME_OFFSET,\n headerOffset + offsets.CD_FILE_NAME_OFFSET + fileNameLength\n );\n\n const extraOffset = headerOffset + offsets.CD_FILE_NAME_OFFSET + fileNameLength;\n\n const oldFormatOffset = buffer.getUint32(\n headerOffset + offsets.CD_LOCAL_HEADER_OFFSET_OFFSET,\n true\n );\n\n let fileDataOffset = oldFormatOffset;\n if (fileDataOffset === 0xffffffff) {\n let offsetInZip64Data = 4;\n // looking for info that might be also be in zip64 extra field\n if (compressedSize === 0xffffffff) {\n offsetInZip64Data += 8;\n }\n if (uncompressedSize === 0xffffffff) {\n offsetInZip64Data += 8;\n }\n\n // getUint32 needs to be replaced with getBigUint64 for archieves bigger than 2gb\n fileDataOffset = buffer.getUint32(extraOffset + offsetInZip64Data, true); // setting it to the one from zip64\n }\n const localHeaderOffset = fileDataOffset;\n\n return {\n compressedSize,\n uncompressedSize,\n fileNameLength,\n fileName,\n extraOffset,\n localHeaderOffset\n };\n};\n"],"mappings":"AAqCA,OAAO,MAAMA,oBAAoB,GAAGA,CAACC,YAAoB,EAAEC,MAAgB,KAAsB;EAC/F,MAAMC,OAAO,GAAG;IACdC,yBAAyB,EAAE,EAAE;IAC7BC,2BAA2B,EAAE,EAAE;IAC/BC,0BAA0B,EAAE,EAAE;IAC9BC,4BAA4B,EAAE,EAAE;IAChCC,6BAA6B,EAAE,EAAE;IACjCC,mBAAmB,EAAE;EACvB,CAAC;EAED,MAAMC,cAAc,GAAGR,MAAM,CAACS,SAAS,CAACV,YAAY,GAAGE,OAAO,CAACC,yBAAyB,EAAE,IAAI,CAAC;EAE/F,MAAMQ,gBAAgB,GAAGV,MAAM,CAACS,SAAS,CACvCV,YAAY,GAAGE,OAAO,CAACE,2BAA2B,EAClD,IACF,CAAC;EAED,MAAMQ,cAAc,GAAGX,MAAM,CAACY,SAAS,CAACb,YAAY,GAAGE,OAAO,CAACG,0BAA0B,EAAE,IAAI,CAAC;EAEhG,MAAMS,QAAQ,GAAGb,MAAM,CAACA,MAAM,CAACc,KAAK,CAClCf,YAAY,GAAGE,OAAO,CAACM,mBAAmB,EAC1CR,YAAY,GAAGE,OAAO,CAACM,mBAAmB,GAAGI,cAC/C,CAAC;EAED,MAAMI,WAAW,GAAGhB,YAAY,GAAGE,OAAO,CAACM,mBAAmB,GAAGI,cAAc;EAE/E,MAAMK,eAAe,GAAGhB,MAAM,CAACS,SAAS,CACtCV,YAAY,GAAGE,OAAO,CAACK,6BAA6B,EACpD,IACF,CAAC;EAED,IAAIW,cAAc,GAAGD,eAAe;EACpC,IAAIC,cAAc,KAAK,UAAU,EAAE;IACjC,IAAIC,iBAAiB,GAAG,CAAC;IAEzB,IAAIV,cAAc,KAAK,UAAU,EAAE;MACjCU,iBAAiB,IAAI,CAAC;IACxB;IACA,IAAIR,gBAAgB,KAAK,UAAU,EAAE;MACnCQ,iBAAiB,IAAI,CAAC;IA
CxB;IAGAD,cAAc,GAAGjB,MAAM,CAACS,SAAS,CAACM,WAAW,GAAGG,iBAAiB,EAAE,IAAI,CAAC;EAC1E;EACA,MAAMC,iBAAiB,GAAGF,cAAc;EAExC,OAAO;IACLT,cAAc;IACdE,gBAAgB;IAChBC,cAAc;IACdE,QAAQ;IACRE,WAAW;IACXI;EACF,CAAC;AACH,CAAC"}
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
/**
 * Parses a local file header of a zip file
 * (https://en.wikipedia.org/wiki/ZIP_(file_format)).
 * @param headerOffset - offset in the archive where the header starts
 * @param buffer - DataView containing the whole archive
 * @returns {fileNameLength, extraFieldLength, fileDataOffset, compressedSize}
 *          read from the header
 */
export const parseZipLocalFileHeader = (headerOffset, buffer) => {
  // Field positions relative to the header start
  const COMPRESSED_SIZE_OFFSET = 18;
  const FILE_NAME_LENGTH_OFFSET = 26;
  const EXTRA_FIELD_LENGTH_OFFSET = 28;
  const FILE_NAME_OFFSET = 30;

  const fileNameLength = buffer.getUint16(headerOffset + FILE_NAME_LENGTH_OFFSET, true);
  const extraFieldLength = buffer.getUint16(headerOffset + EXTRA_FIELD_LENGTH_OFFSET, true);
  const compressedSize = buffer.getUint32(headerOffset + COMPRESSED_SIZE_OFFSET, true);

  // File data begins right after the fixed header, file name and extra field
  const fileDataOffset = headerOffset + FILE_NAME_OFFSET + fileNameLength + extraFieldLength;

  return {
    fileNameLength,
    extraFieldLength,
    fileDataOffset,
    compressedSize
  };
};
|
|
19
|
+
//# sourceMappingURL=local-file-header.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"local-file-header.js","names":["parseZipLocalFileHeader","headerOffset","buffer","offsets","COMPRESSED_SIZE_OFFSET","FILE_NAME_LENGTH_OFFSET","EXTRA_FIELD_LENGTH_OFFSET","FILE_NAME_OFFSET","fileNameLength","getUint16","extraFieldLength","fileDataOffset","compressedSize","getUint32"],"sources":["../../../../../src/lib/parsers/parse-zip/local-file-header.ts"],"sourcesContent":["/**\n * zip local file header info\n * according to https://en.wikipedia.org/wiki/ZIP_(file_format)\n */\nexport type ZipLocalFileHeader = {\n /**\n * File name length\n */\n fileNameLength: number;\n /**\n * Extra field length\n */\n extraFieldLength: number;\n /**\n * Offset of the file data\n */\n fileDataOffset: number;\n /**\n * Compressed size\n */\n compressedSize: number;\n};\n\n/**\n * Parses local file header of zip file\n * @param headerOffset - offset in the archive where header starts\n * @param buffer - buffer containing whole array\n * @returns Info from the header\n */\nexport const parseZipLocalFileHeader = (\n headerOffset: number,\n buffer: DataView\n): ZipLocalFileHeader => {\n const offsets = {\n COMPRESSED_SIZE_OFFSET: 18,\n FILE_NAME_LENGTH_OFFSET: 26,\n EXTRA_FIELD_LENGTH_OFFSET: 28,\n FILE_NAME_OFFSET: 30\n };\n\n const fileNameLength = buffer.getUint16(headerOffset + offsets.FILE_NAME_LENGTH_OFFSET, true);\n\n const extraFieldLength = buffer.getUint16(headerOffset + offsets.EXTRA_FIELD_LENGTH_OFFSET, true);\n\n const fileDataOffset =\n headerOffset + offsets.FILE_NAME_OFFSET + fileNameLength + extraFieldLength;\n\n const compressedSize = buffer.getUint32(headerOffset + offsets.COMPRESSED_SIZE_OFFSET, true);\n\n return {\n fileNameLength,\n extraFieldLength,\n fileDataOffset,\n compressedSize\n 
};\n};\n"],"mappings":"AA6BA,OAAO,MAAMA,uBAAuB,GAAGA,CACrCC,YAAoB,EACpBC,MAAgB,KACO;EACvB,MAAMC,OAAO,GAAG;IACdC,sBAAsB,EAAE,EAAE;IAC1BC,uBAAuB,EAAE,EAAE;IAC3BC,yBAAyB,EAAE,EAAE;IAC7BC,gBAAgB,EAAE;EACpB,CAAC;EAED,MAAMC,cAAc,GAAGN,MAAM,CAACO,SAAS,CAACR,YAAY,GAAGE,OAAO,CAACE,uBAAuB,EAAE,IAAI,CAAC;EAE7F,MAAMK,gBAAgB,GAAGR,MAAM,CAACO,SAAS,CAACR,YAAY,GAAGE,OAAO,CAACG,yBAAyB,EAAE,IAAI,CAAC;EAEjG,MAAMK,cAAc,GAClBV,YAAY,GAAGE,OAAO,CAACI,gBAAgB,GAAGC,cAAc,GAAGE,gBAAgB;EAE7E,MAAME,cAAc,GAAGV,MAAM,CAACW,SAAS,CAACZ,YAAY,GAAGE,OAAO,CAACC,sBAAsB,EAAE,IAAI,CAAC;EAE5F,OAAO;IACLI,cAAc;IACdE,gBAAgB;IAChBC,cAAc;IACdC;EACF,CAAC;AACH,CAAC"}
|
|
@@ -1,5 +1,5 @@
|
|
|
1
|
-
import { createLoaderWorker } from '@loaders.gl/loader-utils';
|
|
2
1
|
import '@loaders.gl/polyfills';
|
|
2
|
+
import { createLoaderWorker } from '@loaders.gl/loader-utils';
|
|
3
3
|
import { I3SContentLoader } from '../i3s-content-loader';
|
|
4
4
|
createLoaderWorker(I3SContentLoader);
|
|
5
|
-
//# sourceMappingURL=i3s-content-
|
|
5
|
+
//# sourceMappingURL=i3s-content-worker-node.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"i3s-content-worker-node.js","names":["createLoaderWorker","I3SContentLoader"],"sources":["../../../src/workers/i3s-content-worker-node.ts"],"sourcesContent":["// Polyfills increases the bundle size significantly. Use it for NodeJS worker only\nimport '@loaders.gl/polyfills';\nimport {createLoaderWorker} from '@loaders.gl/loader-utils';\nimport {I3SContentLoader} from '../i3s-content-loader';\n\ncreateLoaderWorker(I3SContentLoader);\n"],"mappings":"AACA,OAAO,uBAAuB;AAC9B,SAAQA,kBAAkB,QAAO,0BAA0B;AAC3D,SAAQC,gBAAgB,QAAO,uBAAuB;AAEtDD,kBAAkB,CAACC,gBAAgB,CAAC"}
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"i3s-content-loader.d.ts","sourceRoot":"","sources":["../src/i3s-content-loader.ts"],"names":[],"mappings":"
|
|
1
|
+
{"version":3,"file":"i3s-content-loader.d.ts","sourceRoot":"","sources":["../src/i3s-content-loader.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAC,gBAAgB,EAAgB,MAAM,0BAA0B,CAAC;AAS9E;;GAEG;AACH,eAAO,MAAM,gBAAgB,EAAE,gBAY9B,CAAC"}
|
|
@@ -1,7 +1,6 @@
|
|
|
1
1
|
"use strict";
|
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
3
|
exports.I3SContentLoader = void 0;
|
|
4
|
-
const worker_utils_1 = require("@loaders.gl/worker-utils");
|
|
5
4
|
const parse_i3s_tile_content_1 = require("./lib/parsers/parse-i3s-tile-content");
|
|
6
5
|
// __VERSION__ is injected by babel-plugin-version-inline
|
|
7
6
|
// @ts-ignore TS2304: Cannot find name '__VERSION__'.
|
|
@@ -11,7 +10,7 @@ const VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'beta';
|
|
|
11
10
|
*/
|
|
12
11
|
exports.I3SContentLoader = {
|
|
13
12
|
name: 'I3S Content (Indexed Scene Layers)',
|
|
14
|
-
id:
|
|
13
|
+
id: 'i3s-content',
|
|
15
14
|
module: 'i3s',
|
|
16
15
|
worker: true,
|
|
17
16
|
version: VERSION,
|