@loaders.gl/i3s 4.0.0-alpha.14 → 4.0.0-alpha.16
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/dist.min.js +329 -211
- package/dist/es5/arcgis-webscene-loader.js +1 -1
- package/dist/es5/i3s-attribute-loader.js +1 -1
- package/dist/es5/i3s-building-scene-layer-loader.js +1 -1
- package/dist/es5/i3s-content-loader.js +1 -1
- package/dist/es5/i3s-loader.js +1 -1
- package/dist/es5/i3s-node-page-loader.js +1 -1
- package/dist/es5/i3s-slpk-loader.js +30 -2
- package/dist/es5/i3s-slpk-loader.js.map +1 -1
- package/dist/es5/index.js +7 -7
- package/dist/es5/index.js.map +1 -1
- package/dist/es5/lib/parsers/constants.js +14 -15
- package/dist/es5/lib/parsers/constants.js.map +1 -1
- package/dist/es5/lib/parsers/parse-slpk/parse-slpk.js +101 -76
- package/dist/es5/lib/parsers/parse-slpk/parse-slpk.js.map +1 -1
- package/dist/es5/lib/parsers/parse-slpk/slpk-archieve.js +54 -34
- package/dist/es5/lib/parsers/parse-slpk/slpk-archieve.js.map +1 -1
- package/dist/es5/lib/parsers/parse-zip/cd-file-header.js +76 -39
- package/dist/es5/lib/parsers/parse-zip/cd-file-header.js.map +1 -1
- package/dist/es5/lib/parsers/parse-zip/data-view-file-provider.js +129 -0
- package/dist/es5/lib/parsers/parse-zip/data-view-file-provider.js.map +1 -0
- package/dist/es5/lib/parsers/parse-zip/end-of-central-directory.js +100 -0
- package/dist/es5/lib/parsers/parse-zip/end-of-central-directory.js.map +1 -0
- package/dist/es5/lib/parsers/parse-zip/file-provider.js.map +1 -1
- package/dist/es5/lib/parsers/parse-zip/local-file-header.js +55 -14
- package/dist/es5/lib/parsers/parse-zip/local-file-header.js.map +1 -1
- package/dist/es5/lib/parsers/parse-zip/search-from-the-end.js +69 -0
- package/dist/es5/lib/parsers/parse-zip/search-from-the-end.js.map +1 -0
- package/dist/es5/types.js +1 -14
- package/dist/es5/types.js.map +1 -1
- package/dist/esm/arcgis-webscene-loader.js +1 -1
- package/dist/esm/i3s-attribute-loader.js +1 -1
- package/dist/esm/i3s-building-scene-layer-loader.js +1 -1
- package/dist/esm/i3s-content-loader.js +1 -1
- package/dist/esm/i3s-loader.js +1 -1
- package/dist/esm/i3s-node-page-loader.js +1 -1
- package/dist/esm/i3s-slpk-loader.js +8 -2
- package/dist/esm/i3s-slpk-loader.js.map +1 -1
- package/dist/esm/index.js +1 -1
- package/dist/esm/index.js.map +1 -1
- package/dist/esm/lib/parsers/constants.js +14 -15
- package/dist/esm/lib/parsers/constants.js.map +1 -1
- package/dist/esm/lib/parsers/parse-slpk/parse-slpk.js +50 -35
- package/dist/esm/lib/parsers/parse-slpk/parse-slpk.js.map +1 -1
- package/dist/esm/lib/parsers/parse-slpk/slpk-archieve.js +38 -22
- package/dist/esm/lib/parsers/parse-slpk/slpk-archieve.js.map +1 -1
- package/dist/esm/lib/parsers/parse-zip/cd-file-header.js +30 -22
- package/dist/esm/lib/parsers/parse-zip/cd-file-header.js.map +1 -1
- package/dist/esm/lib/parsers/parse-zip/data-view-file-provider.js +32 -0
- package/dist/esm/lib/parsers/parse-zip/data-view-file-provider.js.map +1 -0
- package/dist/esm/lib/parsers/parse-zip/end-of-central-directory.js +33 -0
- package/dist/esm/lib/parsers/parse-zip/end-of-central-directory.js.map +1 -0
- package/dist/esm/lib/parsers/parse-zip/file-provider.js.map +1 -1
- package/dist/esm/lib/parsers/parse-zip/local-file-header.js +25 -10
- package/dist/esm/lib/parsers/parse-zip/local-file-header.js.map +1 -1
- package/dist/esm/lib/parsers/parse-zip/search-from-the-end.js +16 -0
- package/dist/esm/lib/parsers/parse-zip/search-from-the-end.js.map +1 -0
- package/dist/esm/types.js +0 -12
- package/dist/esm/types.js.map +1 -1
- package/dist/i3s-content-worker-node.js +47 -47
- package/dist/i3s-content-worker-node.js.map +2 -2
- package/dist/i3s-content-worker.js +22 -34
- package/dist/i3s-slpk-loader.d.ts +3 -0
- package/dist/i3s-slpk-loader.d.ts.map +1 -1
- package/dist/i3s-slpk-loader.js +11 -1
- package/dist/index.d.ts +3 -2
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +3 -1
- package/dist/lib/parsers/constants.d.ts.map +1 -1
- package/dist/lib/parsers/constants.js +14 -15
- package/dist/lib/parsers/parse-slpk/parse-slpk.d.ts +9 -3
- package/dist/lib/parsers/parse-slpk/parse-slpk.d.ts.map +1 -1
- package/dist/lib/parsers/parse-slpk/parse-slpk.js +65 -42
- package/dist/lib/parsers/parse-slpk/slpk-archieve.d.ts +22 -10
- package/dist/lib/parsers/parse-slpk/slpk-archieve.d.ts.map +1 -1
- package/dist/lib/parsers/parse-slpk/slpk-archieve.js +56 -28
- package/dist/lib/parsers/parse-zip/cd-file-header.d.ts +9 -5
- package/dist/lib/parsers/parse-zip/cd-file-header.d.ts.map +1 -1
- package/dist/lib/parsers/parse-zip/cd-file-header.js +32 -25
- package/dist/lib/parsers/parse-zip/{buffer-file-provider.d.ts → data-view-file-provider.d.ts} +15 -16
- package/dist/lib/parsers/parse-zip/data-view-file-provider.d.ts.map +1 -0
- package/dist/lib/parsers/parse-zip/{buffer-file-provider.js → data-view-file-provider.js} +28 -14
- package/dist/lib/parsers/parse-zip/end-of-central-directory.d.ts +18 -0
- package/dist/lib/parsers/parse-zip/end-of-central-directory.d.ts.map +1 -0
- package/dist/lib/parsers/parse-zip/end-of-central-directory.js +41 -0
- package/dist/lib/parsers/parse-zip/file-provider.d.ts +10 -5
- package/dist/lib/parsers/parse-zip/file-provider.d.ts.map +1 -1
- package/dist/lib/parsers/parse-zip/local-file-header.d.ts +5 -3
- package/dist/lib/parsers/parse-zip/local-file-header.d.ts.map +1 -1
- package/dist/lib/parsers/parse-zip/local-file-header.js +30 -11
- package/dist/lib/parsers/parse-zip/search-from-the-end.d.ts +11 -0
- package/dist/lib/parsers/parse-zip/search-from-the-end.d.ts.map +1 -0
- package/dist/lib/parsers/parse-zip/search-from-the-end.js +31 -0
- package/dist/types.d.ts +17 -24
- package/dist/types.d.ts.map +1 -1
- package/dist/types.js +1 -13
- package/package.json +9 -9
- package/src/i3s-slpk-loader.ts +19 -1
- package/src/index.ts +2 -2
- package/src/lib/parsers/constants.ts +14 -15
- package/src/lib/parsers/parse-slpk/parse-slpk.ts +83 -58
- package/src/lib/parsers/parse-slpk/slpk-archieve.ts +59 -44
- package/src/lib/parsers/parse-zip/cd-file-header.ts +52 -32
- package/src/lib/parsers/parse-zip/data-view-file-provider.ts +69 -0
- package/src/lib/parsers/parse-zip/end-of-central-directory.ts +78 -0
- package/src/lib/parsers/parse-zip/file-provider.ts +11 -5
- package/src/lib/parsers/parse-zip/local-file-header.ts +45 -19
- package/src/lib/parsers/parse-zip/search-from-the-end.ts +38 -0
- package/src/types.ts +25 -40
- package/dist/es5/lib/parsers/parse-zip/buffer-file-provider.js +0 -46
- package/dist/es5/lib/parsers/parse-zip/buffer-file-provider.js.map +0 -1
- package/dist/esm/lib/parsers/parse-zip/buffer-file-provider.js +0 -23
- package/dist/esm/lib/parsers/parse-zip/buffer-file-provider.js.map +0 -1
- package/dist/lib/parsers/parse-zip/buffer-file-provider.d.ts.map +0 -1
- package/src/lib/parsers/parse-zip/buffer-file-provider.ts +0 -55
package/dist/esm/i3s-loader.js
CHANGED
@@ -2,7 +2,7 @@ import { parse } from '@loaders.gl/core';
 import { I3SContentLoader } from './i3s-content-loader';
 import { normalizeTileData, normalizeTilesetData } from './lib/parsers/parse-i3s';
 import { COORDINATE_SYSTEM } from './lib/parsers/constants';
-const VERSION = typeof "4.0.0-alpha.
+const VERSION = typeof "4.0.0-alpha.16" !== 'undefined' ? "4.0.0-alpha.16" : 'latest';
 const TILESET_REGEX = /layers\/[0-9]+$/;
 const TILE_HEADER_REGEX = /nodes\/([0-9-]+|root)$/;
 const SLPK_HEX = '504b0304';
package/dist/esm/i3s-node-page-loader.js
CHANGED
@@ -1,4 +1,4 @@
-const VERSION = typeof "4.0.0-alpha.
+const VERSION = typeof "4.0.0-alpha.16" !== 'undefined' ? "4.0.0-alpha.16" : 'latest';
 async function parseNodePage(data, options) {
   return JSON.parse(new TextDecoder().decode(data));
 }
package/dist/esm/i3s-slpk-loader.js
CHANGED
@@ -1,5 +1,6 @@
-import { parseSLPK } from './lib/parsers/parse-slpk/parse-slpk';
-
+import { parseSLPK as parseSLPKFromProvider } from './lib/parsers/parse-slpk/parse-slpk';
+import { DataViewFileProvider } from './lib/parsers/parse-zip/data-view-file-provider';
+const VERSION = typeof "4.0.0-alpha.16" !== 'undefined' ? "4.0.0-alpha.16" : 'latest';
 export const SLPKLoader = {
   name: 'I3S SLPK (Scene Layer Package)',
   id: 'slpk',
@@ -10,4 +11,9 @@ export const SLPKLoader = {
   extensions: ['slpk'],
   options: {}
 };
+async function parseSLPK(data) {
+  var _options$slpk$path, _options$slpk, _options$slpk2;
+  let options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
+  return (await parseSLPKFromProvider(new DataViewFileProvider(new DataView(data)))).getFile((_options$slpk$path = (_options$slpk = options.slpk) === null || _options$slpk === void 0 ? void 0 : _options$slpk.path) !== null && _options$slpk$path !== void 0 ? _options$slpk$path : '', (_options$slpk2 = options.slpk) === null || _options$slpk2 === void 0 ? void 0 : _options$slpk2.pathMode);
+}
 //# sourceMappingURL=i3s-slpk-loader.js.map
package/dist/esm/i3s-slpk-loader.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"i3s-slpk-loader.js","names":["parseSLPK","VERSION","SLPKLoader","name","id","module","version","mimeTypes","parse","extensions","options"],"sources":["../../src/i3s-slpk-loader.ts"],"sourcesContent":["import {LoaderOptions, LoaderWithParser} from '@loaders.gl/loader-utils';\nimport {parseSLPK} from './lib/parsers/parse-slpk/parse-slpk';\n\n// __VERSION__ is injected by babel-plugin-version-inline\n// @ts-ignore TS2304: Cannot find name '__VERSION__'.\nconst VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest';\n\nexport type SLPKLoaderOptions = LoaderOptions & {\n slpk?: {\n path?: string;\n pathMode?: 'http' | 'raw';\n };\n};\n\n/**\n * Loader for SLPK - Scene Layer Package\n */\nexport const SLPKLoader: LoaderWithParser<Buffer, never, SLPKLoaderOptions> = {\n name: 'I3S SLPK (Scene Layer Package)',\n id: 'slpk',\n module: 'i3s',\n version: VERSION,\n mimeTypes: ['application/octet-stream'],\n parse: parseSLPK,\n extensions: ['slpk'],\n options: {}\n};\n"],"mappings":"AACA,SAAQA,SAAS,QAAO,qCAAqC;
+
{"version":3,"file":"i3s-slpk-loader.js","names":["parseSLPK","parseSLPKFromProvider","DataViewFileProvider","VERSION","SLPKLoader","name","id","module","version","mimeTypes","parse","extensions","options","data","_options$slpk$path","_options$slpk","_options$slpk2","arguments","length","undefined","DataView","getFile","slpk","path","pathMode"],"sources":["../../src/i3s-slpk-loader.ts"],"sourcesContent":["import {LoaderOptions, LoaderWithParser} from '@loaders.gl/loader-utils';\nimport {parseSLPK as parseSLPKFromProvider} from './lib/parsers/parse-slpk/parse-slpk';\nimport {DataViewFileProvider} from './lib/parsers/parse-zip/data-view-file-provider';\n\n// __VERSION__ is injected by babel-plugin-version-inline\n// @ts-ignore TS2304: Cannot find name '__VERSION__'.\nconst VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest';\n\n/** options to load data from SLPK */\nexport type SLPKLoaderOptions = LoaderOptions & {\n slpk?: {\n /** path inside the slpk archive */\n path?: string;\n /** mode of the path */\n pathMode?: 'http' | 'raw';\n };\n};\n\n/**\n * Loader for SLPK - Scene Layer Package\n */\nexport const SLPKLoader: LoaderWithParser<Buffer, never, SLPKLoaderOptions> = {\n name: 'I3S SLPK (Scene Layer Package)',\n id: 'slpk',\n module: 'i3s',\n version: VERSION,\n mimeTypes: ['application/octet-stream'],\n parse: parseSLPK,\n extensions: ['slpk'],\n options: {}\n};\n\n/**\n * returns a single file from the slpk archive\n * @param data slpk archive data\n * @param options options\n * @returns requested file\n */\n\nasync function parseSLPK(data: ArrayBuffer, options: SLPKLoaderOptions = {}) {\n return (await parseSLPKFromProvider(new DataViewFileProvider(new DataView(data)))).getFile(\n options.slpk?.path ?? '',\n options.slpk?.pathMode\n );\n}\n"],"mappings":"AACA,SAAQA,SAAS,IAAIC,qBAAqB,QAAO,qCAAqC;AACtF,SAAQC,oBAAoB,QAAO,iDAAiD;AAIpF,MAAMC,OAAO,GAAG,uBAAkB,KAAK,WAAW,sBAAiB,QAAQ;AAe3E,OAAO,MAAMC,UAA8D,GAAG;EAC5EC,IAAI,EAAE,gCAAgC;EACtCC,EAAE,EAAE,MAAM;EACVC,MAAM,EAAE,KAAK;EACbC,OAAO,EAAEL,OAAO;EAChBM,SAAS,EAAE,CAAC,0BAA0B,CAAC;EACvCC,KAAK,EAAEV,SAAS;EAChBW,UAAU,EAAE,CAAC,MAAM,CAAC;EACpBC,OAAO,EAAE,CAAC;AACZ,CAAC;AASD,eAAeZ,SAASA,CAACa,IAAiB,EAAmC;EAAA,IAAAC,kBAAA,EAAAC,aAAA,EAAAC,cAAA;EAAA,IAAjCJ,OAA0B,GAAAK,SAAA,CAAAC,MAAA,QAAAD,SAAA,QAAAE,SAAA,GAAAF,SAAA,MAAG,CAAC,CAAC;EACzE,OAAO,CAAC,MAAMhB,qBAAqB,CAAC,IAAIC,oBAAoB,CAAC,IAAIkB,QAAQ,CAACP,IAAI,CAAC,CAAC,CAAC,EAAEQ,OAAO,EAAAP,kBAAA,IAAAC,aAAA,GACxFH,OAAO,CAACU,IAAI,cAAAP,aAAA,uBAAZA,aAAA,CAAcQ,IAAI,cAAAT,kBAAA,cAAAA,kBAAA,GAAI,EAAE,GAAAE,cAAA,GACxBJ,OAAO,CAACU,IAAI,cAAAN,cAAA,uBAAZA,cAAA,CAAcQ,QAChB,CAAC;AACH"}
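Note: SLPKLoader.parse now wraps the archive buffer in a DataViewFileProvider and reads a single entry selected through the slpk.path / slpk.pathMode options (see the SLPKLoaderOptions type embedded in the source map above). A minimal usage sketch; the archive buffer and the 'nodepages/0' path are illustrative, not taken from the package:

```ts
import {parse} from '@loaders.gl/core';
import {SLPKLoader} from '@loaders.gl/i3s';

// Extract one file from an .slpk archive that is already in memory.
// pathMode 'http' lets the loader resolve the logical path to the real
// entry name inside the archive (for example a .json.gz file) and unzip it.
async function readNodePage(slpkArrayBuffer: ArrayBuffer): Promise<Buffer> {
  return parse(slpkArrayBuffer, SLPKLoader, {
    slpk: {path: 'nodepages/0', pathMode: 'http'}
  });
}
```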
package/dist/esm/index.js
CHANGED
@@ -7,5 +7,5 @@ export { I3SBuildingSceneLayerLoader } from './i3s-building-scene-layer-loader';
 export { I3SNodePageLoader } from './i3s-node-page-loader';
 export { ArcGisWebSceneLoader } from './arcgis-webscene-loader';
 export { parseZipLocalFileHeader } from './lib/parsers/parse-zip/local-file-header';
-export {
+export { parseSLPK } from './lib/parsers/parse-slpk/parse-slpk';
 //# sourceMappingURL=index.js.map
package/dist/esm/index.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"index.js","names":["COORDINATE_SYSTEM","I3SLoader","SLPKLoader","I3SContentLoader","I3SAttributeLoader","loadFeatureAttributes","I3SBuildingSceneLayerLoader","I3SNodePageLoader","ArcGisWebSceneLoader","parseZipLocalFileHeader","
+
{"version":3,"file":"index.js","names":["COORDINATE_SYSTEM","I3SLoader","SLPKLoader","I3SContentLoader","I3SAttributeLoader","loadFeatureAttributes","I3SBuildingSceneLayerLoader","I3SNodePageLoader","ArcGisWebSceneLoader","parseZipLocalFileHeader","parseSLPK"],"sources":["../../src/index.ts"],"sourcesContent":["// loaders.gl, MIT license\n\nexport type {\n BoundingVolumes,\n Mbs,\n Obb,\n SceneLayer3D,\n AttributeStorageInfo,\n Field,\n ESRIField,\n PopupInfo,\n Node3DIndexDocument,\n LodSelection,\n NodeReference,\n Resource,\n MaxScreenThresholdSQ,\n NodeInPage,\n SharedResources,\n Attribute,\n Extent,\n FeatureAttribute,\n FieldInfo,\n I3SMaterialDefinition,\n TextureDefinitionInfo,\n MaterialDefinitionInfo,\n FullExtent,\n StatisticsInfo,\n StatsInfo,\n Histogram,\n ValueCount,\n BuildingSceneSublayer,\n OperationalLayer,\n TextureSetDefinitionFormats\n} from './types';\nexport type {FileProvider} from './lib/parsers/parse-zip/file-provider';\n\nexport {COORDINATE_SYSTEM} from './lib/parsers/constants';\n\nexport {I3SLoader} from './i3s-loader';\nexport {SLPKLoader} from './i3s-slpk-loader';\nexport {I3SContentLoader} from './i3s-content-loader';\nexport {I3SAttributeLoader, loadFeatureAttributes} from './i3s-attribute-loader';\nexport {I3SBuildingSceneLayerLoader} from './i3s-building-scene-layer-loader';\nexport {I3SNodePageLoader} from './i3s-node-page-loader';\nexport {ArcGisWebSceneLoader} from './arcgis-webscene-loader';\nexport {parseZipLocalFileHeader} from './lib/parsers/parse-zip/local-file-header';\nexport {parseSLPK} from './lib/parsers/parse-slpk/parse-slpk';\n"],"mappings":"AAoCA,SAAQA,iBAAiB,QAAO,yBAAyB;AAEzD,SAAQC,SAAS,QAAO,cAAc;AACtC,SAAQC,UAAU,QAAO,mBAAmB;AAC5C,SAAQC,gBAAgB,QAAO,sBAAsB;AACrD,SAAQC,kBAAkB,EAAEC,qBAAqB,QAAO,wBAAwB;AAChF,SAAQC,2BAA2B,QAAO,mCAAmC;AAC7E,SAAQC,iBAAiB,QAAO,wBAAwB;AACxD,SAAQC,oBAAoB,QAAO,0BAA0B;AAC7D,SAAQC,uBAAuB,QAAO,2CAA2C;AACjF,SAAQC,SAAS,QAAO,qCAAqC"}
package/dist/esm/lib/parsers/constants.js
CHANGED
@@ -1,16 +1,15 @@
 import GL from '@luma.gl/constants';
-import { DATA_TYPE } from '../../types';
 export function getConstructorForDataFormat(dataType) {
   switch (dataType) {
-    case
+    case 'UInt8':
       return Uint8Array;
-    case
+    case 'UInt16':
       return Uint16Array;
-    case
+    case 'UInt32':
       return Uint32Array;
-    case
+    case 'Float32':
       return Float32Array;
-    case
+    case 'UInt64':
       return Float64Array;
     default:
       throw new Error("parse i3s tile content: unknown type of data: ".concat(dataType));
@@ -25,18 +24,18 @@ export const GL_TYPE_MAP = {
 };
 export function sizeOf(dataType) {
   switch (dataType) {
-    case
+    case 'UInt8':
       return 1;
-    case
-    case
+    case 'UInt16':
+    case 'Int16':
       return 2;
-    case
-    case
-    case
+    case 'UInt32':
+    case 'Int32':
+    case 'Float32':
       return 4;
-    case
-    case
-    case
+    case 'UInt64':
+    case 'Int64':
+    case 'Float64':
       return 8;
     default:
       throw new Error("parse i3s tile content: unknown size of data: ".concat(dataType));
package/dist/esm/lib/parsers/constants.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"constants.js","names":["GL","
+
{"version":3,"file":"constants.js","names":["GL","getConstructorForDataFormat","dataType","Uint8Array","Uint16Array","Uint32Array","Float32Array","Float64Array","Error","concat","GL_TYPE_MAP","UInt8","UNSIGNED_BYTE","UInt16","UNSIGNED_SHORT","Float32","FLOAT","UInt32","UNSIGNED_INT","UInt64","DOUBLE","sizeOf","STRING_ATTRIBUTE_TYPE","OBJECT_ID_ATTRIBUTE_TYPE","FLOAT_64_TYPE","INT_16_ATTRIBUTE_TYPE","COORDINATE_SYSTEM"],"sources":["../../../../src/lib/parsers/constants.ts"],"sourcesContent":["import GL from '@luma.gl/constants';\n\nexport function getConstructorForDataFormat(dataType: string) {\n switch (dataType) {\n case 'UInt8':\n return Uint8Array;\n case 'UInt16':\n return Uint16Array;\n case 'UInt32':\n return Uint32Array;\n case 'Float32':\n return Float32Array;\n case 'UInt64':\n return Float64Array;\n default:\n throw new Error(`parse i3s tile content: unknown type of data: ${dataType}`);\n }\n}\n\nexport const GL_TYPE_MAP: {[key: string]: number} = {\n UInt8: GL.UNSIGNED_BYTE,\n UInt16: GL.UNSIGNED_SHORT,\n Float32: GL.FLOAT,\n UInt32: GL.UNSIGNED_INT,\n UInt64: GL.DOUBLE\n};\n/**\n * Returns how many bytes a type occupies\n * @param dataType\n * @returns\n */\nexport function sizeOf(dataType: string): number {\n switch (dataType) {\n case 'UInt8':\n return 1;\n case 'UInt16':\n case 'Int16':\n return 2;\n case 'UInt32':\n case 'Int32':\n case 'Float32':\n return 4;\n case 'UInt64':\n case 'Int64':\n case 'Float64':\n return 8;\n default:\n throw new Error(`parse i3s tile content: unknown size of data: ${dataType}`);\n }\n}\n\nexport const STRING_ATTRIBUTE_TYPE = 'String';\nexport const OBJECT_ID_ATTRIBUTE_TYPE = 'Oid32';\nexport const FLOAT_64_TYPE = 'Float64';\nexport const INT_16_ATTRIBUTE_TYPE = 'Int16';\n\n// https://github.com/visgl/deck.gl/blob/9548f43cba2234a1f4877b6b17f6c88eb35b2e08/modules/core/src/lib/constants.js#L27\n// Describes the format of positions\nexport enum COORDINATE_SYSTEM {\n /**\n * `LNGLAT` if rendering into a geospatial viewport, `CARTESIAN` otherwise\n */\n DEFAULT = -1,\n /**\n * Positions are interpreted as [lng, lat, elevation]\n * lng lat are degrees, elevation is meters. 
distances as meters.\n */\n LNGLAT = 1,\n /**\n * Positions are interpreted as meter offsets, distances as meters\n */\n METER_OFFSETS = 2,\n /**\n * Positions are interpreted as lng lat offsets: [deltaLng, deltaLat, elevation]\n * deltaLng, deltaLat are delta degrees, elevation is meters.\n * distances as meters.\n */\n LNGLAT_OFFSETS = 3,\n /**\n * Non-geospatial\n */\n CARTESIAN = 0\n}\n"],"mappings":"AAAA,OAAOA,EAAE,MAAM,oBAAoB;AAEnC,OAAO,SAASC,2BAA2BA,CAACC,QAAgB,EAAE;EAC5D,QAAQA,QAAQ;IACd,KAAK,OAAO;MACV,OAAOC,UAAU;IACnB,KAAK,QAAQ;MACX,OAAOC,WAAW;IACpB,KAAK,QAAQ;MACX,OAAOC,WAAW;IACpB,KAAK,SAAS;MACZ,OAAOC,YAAY;IACrB,KAAK,QAAQ;MACX,OAAOC,YAAY;IACrB;MACE,MAAM,IAAIC,KAAK,kDAAAC,MAAA,CAAkDP,QAAQ,CAAE,CAAC;EAChF;AACF;AAEA,OAAO,MAAMQ,WAAoC,GAAG;EAClDC,KAAK,EAAEX,EAAE,CAACY,aAAa;EACvBC,MAAM,EAAEb,EAAE,CAACc,cAAc;EACzBC,OAAO,EAAEf,EAAE,CAACgB,KAAK;EACjBC,MAAM,EAAEjB,EAAE,CAACkB,YAAY;EACvBC,MAAM,EAAEnB,EAAE,CAACoB;AACb,CAAC;AAMD,OAAO,SAASC,MAAMA,CAACnB,QAAgB,EAAU;EAC/C,QAAQA,QAAQ;IACd,KAAK,OAAO;MACV,OAAO,CAAC;IACV,KAAK,QAAQ;IACb,KAAK,OAAO;MACV,OAAO,CAAC;IACV,KAAK,QAAQ;IACb,KAAK,OAAO;IACZ,KAAK,SAAS;MACZ,OAAO,CAAC;IACV,KAAK,QAAQ;IACb,KAAK,OAAO;IACZ,KAAK,SAAS;MACZ,OAAO,CAAC;IACV;MACE,MAAM,IAAIM,KAAK,kDAAAC,MAAA,CAAkDP,QAAQ,CAAE,CAAC;EAChF;AACF;AAEA,OAAO,MAAMoB,qBAAqB,GAAG,QAAQ;AAC7C,OAAO,MAAMC,wBAAwB,GAAG,OAAO;AAC/C,OAAO,MAAMC,aAAa,GAAG,SAAS;AACtC,OAAO,MAAMC,qBAAqB,GAAG,OAAO;AAI5C,WAAYC,iBAAiB,aAAjBA,iBAAiB;EAAjBA,iBAAiB,CAAjBA,iBAAiB;EAAjBA,iBAAiB,CAAjBA,iBAAiB;EAAjBA,iBAAiB,CAAjBA,iBAAiB;EAAjBA,iBAAiB,CAAjBA,iBAAiB;EAAjBA,iBAAiB,CAAjBA,iBAAiB;EAAA,OAAjBA,iBAAiB;AAAA"}
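The constants module now switches on the literal attribute type names ('UInt8', 'Float32', ...) instead of the removed DATA_TYPE import. A short sketch of what the two helpers yield; the deep import path is internal to the package and shown only for illustration:

```ts
// Internal module (package/dist/lib/parsers/constants); illustrative import path.
import {getConstructorForDataFormat, sizeOf} from '@loaders.gl/i3s/dist/lib/parsers/constants';

declare const attributeBuffer: ArrayBuffer; // hypothetical attribute data
declare const count: number;                // hypothetical value count

const bytesPerValue = sizeOf('Float32');                   // 4
const TypedArray = getConstructorForDataFormat('Float32'); // Float32Array
const values = new TypedArray(attributeBuffer, 0, count);  // view over the raw values
```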
package/dist/esm/lib/parsers/parse-slpk/parse-slpk.js
CHANGED
@@ -1,41 +1,56 @@
-import
-import { parseZipCDFileHeader } from '../parse-zip/cd-file-header';
+import md5 from 'md5';
+import { parseZipCDFileHeader, signature as cdHeaderSignature } from '../parse-zip/cd-file-header';
+import { parseEoCDRecord } from '../parse-zip/end-of-central-directory';
 import { parseZipLocalFileHeader } from '../parse-zip/local-file-header';
-import {
-
-
-
-export async function parseSLPK(data) {
-  var _options$slpk$path, _options$slpk, _options$slpk2;
-  let options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
-  const archive = new DataView(data);
-  const cdFileHeaderSignature = [80, 75, 1, 2];
-  const searchWindow = [getByteAt(archive.byteLength - 1, archive), getByteAt(archive.byteLength - 2, archive), getByteAt(archive.byteLength - 3, archive), undefined];
-  let hashCDOffset = 0;
-  for (let i = archive.byteLength - 4; i > -1; i--) {
-    searchWindow[3] = searchWindow[2];
-    searchWindow[2] = searchWindow[1];
-    searchWindow[1] = searchWindow[0];
-    searchWindow[0] = getByteAt(i, archive);
-    if (searchWindow.every((val, index) => val === cdFileHeaderSignature[index])) {
-      hashCDOffset = i;
-      break;
-    }
-  }
-  const fileProvider = new DataViewFileProvider(archive);
+import { searchFromTheEnd } from '../parse-zip/search-from-the-end';
+import { SLPKArchive, compareHashes } from './slpk-archieve';
+export const parseSLPK = async (fileProvider, cb) => {
+  const hashCDOffset = await searchFromTheEnd(fileProvider, cdHeaderSignature);
   const cdFileHeader = await parseZipCDFileHeader(hashCDOffset, fileProvider);
-
-
+  let hashData;
+  if ((cdFileHeader === null || cdFileHeader === void 0 ? void 0 : cdFileHeader.fileName) !== '@specialIndexFileHASH128@') {
+    cb === null || cb === void 0 ? void 0 : cb('SLPK doesnt contain hash file');
+    hashData = await generateHashInfo(fileProvider);
+    cb === null || cb === void 0 ? void 0 : cb('hash info has been composed according to central directory records');
+  } else {
+    cb === null || cb === void 0 ? void 0 : cb('SLPK contains hash file');
+    const localFileHeader = await parseZipLocalFileHeader(cdFileHeader.localHeaderOffset, fileProvider);
+    if (!localFileHeader) {
+      throw new Error('corrupted SLPK');
+    }
+    const fileDataOffset = localFileHeader.fileDataOffset;
+    const hashFile = await fileProvider.slice(fileDataOffset, fileDataOffset + localFileHeader.compressedSize);
+    hashData = parseHashFile(hashFile);
   }
-
-
-
+  return new SLPKArchive(fileProvider, hashData);
+};
+const generateHashInfo = async fileProvider => {
+  const {
+    cdStartOffset
+  } = await parseEoCDRecord(fileProvider);
+  let cdHeader = await parseZipCDFileHeader(cdStartOffset, fileProvider);
+  const hashInfo = [];
+  while (cdHeader) {
+    hashInfo.push({
+      hash: Buffer.from(md5(cdHeader.fileName.split('\\').join('/').toLocaleLowerCase()), 'hex'),
+      offset: cdHeader.localHeaderOffset
+    });
+    cdHeader = await parseZipCDFileHeader(cdHeader.extraOffset + BigInt(cdHeader.extraFieldLength), fileProvider);
   }
-  return
-}
+  hashInfo.sort((a, b) => compareHashes(a.hash, b.hash));
+  return hashInfo;
+};
+const parseHashFile = hashFile => {
+  const hashFileBuffer = Buffer.from(hashFile);
+  const hashArray = [];
+  for (let i = 0; i < hashFileBuffer.buffer.byteLength; i = i + 24) {
+    const offsetBuffer = new DataView(hashFileBuffer.buffer.slice(hashFileBuffer.byteOffset + i + 16, hashFileBuffer.byteOffset + i + 24));
+    const offset = offsetBuffer.getBigUint64(offsetBuffer.byteOffset, true);
+    hashArray.push({
+      hash: Buffer.from(hashFileBuffer.subarray(hashFileBuffer.byteOffset + i, hashFileBuffer.byteOffset + i + 16)),
+      offset
+    });
+  }
+  return hashArray;
+};
 //# sourceMappingURL=parse-slpk.js.map
package/dist/esm/lib/parsers/parse-slpk/parse-slpk.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"parse-slpk.js","names":["
+
{"version":3,"file":"parse-slpk.js","names":["md5","parseZipCDFileHeader","signature","cdHeaderSignature","parseEoCDRecord","parseZipLocalFileHeader","searchFromTheEnd","SLPKArchive","compareHashes","parseSLPK","fileProvider","cb","hashCDOffset","cdFileHeader","hashData","fileName","generateHashInfo","localFileHeader","localHeaderOffset","Error","fileDataOffset","hashFile","slice","compressedSize","parseHashFile","cdStartOffset","cdHeader","hashInfo","push","hash","Buffer","from","split","join","toLocaleLowerCase","offset","extraOffset","BigInt","extraFieldLength","sort","a","b","hashFileBuffer","hashArray","i","buffer","byteLength","offsetBuffer","DataView","byteOffset","getBigUint64","subarray"],"sources":["../../../../../src/lib/parsers/parse-slpk/parse-slpk.ts"],"sourcesContent":["import md5 from 'md5';\nimport {parseZipCDFileHeader, signature as cdHeaderSignature} from '../parse-zip/cd-file-header';\nimport {parseEoCDRecord} from '../parse-zip/end-of-central-directory';\nimport {FileProvider} from '../parse-zip/file-provider';\nimport {parseZipLocalFileHeader} from '../parse-zip/local-file-header';\nimport {searchFromTheEnd} from '../parse-zip/search-from-the-end';\nimport {HashElement, SLPKArchive, compareHashes} from './slpk-archieve';\n\n/**\n * Creates slpk file handler from raw file\n * @param fileProvider raw file data\n * @param cb is called with information message during parsing\n * @returns slpk file handler\n */\nexport const parseSLPK = async (\n fileProvider: FileProvider,\n cb?: (msg: string) => void\n): Promise<SLPKArchive> => {\n const hashCDOffset = await searchFromTheEnd(fileProvider, cdHeaderSignature);\n\n const cdFileHeader = await parseZipCDFileHeader(hashCDOffset, fileProvider);\n\n let hashData: HashElement[];\n if (cdFileHeader?.fileName !== '@specialIndexFileHASH128@') {\n cb?.('SLPK doesnt contain hash file');\n hashData = await generateHashInfo(fileProvider);\n cb?.('hash info has been composed according to central directory records');\n } else {\n cb?.('SLPK contains hash file');\n const localFileHeader = await parseZipLocalFileHeader(\n cdFileHeader.localHeaderOffset,\n fileProvider\n );\n if (!localFileHeader) {\n throw new Error('corrupted SLPK');\n }\n\n const fileDataOffset = localFileHeader.fileDataOffset;\n const hashFile = await fileProvider.slice(\n fileDataOffset,\n fileDataOffset + localFileHeader.compressedSize\n );\n\n hashData = parseHashFile(hashFile);\n }\n\n return new SLPKArchive(fileProvider, hashData);\n};\n\n/**\n * generates hash info from central directory\n * @param fileProvider - provider of the archive\n * @returns ready to use hash info\n */\nconst generateHashInfo = async (fileProvider: FileProvider): Promise<HashElement[]> => {\n const {cdStartOffset} = await parseEoCDRecord(fileProvider);\n let cdHeader = await parseZipCDFileHeader(cdStartOffset, fileProvider);\n const hashInfo: HashElement[] = [];\n while (cdHeader) {\n hashInfo.push({\n hash: Buffer.from(md5(cdHeader.fileName.split('\\\\').join('/').toLocaleLowerCase()), 'hex'),\n offset: cdHeader.localHeaderOffset\n });\n cdHeader = await parseZipCDFileHeader(\n cdHeader.extraOffset + BigInt(cdHeader.extraFieldLength),\n fileProvider\n );\n }\n hashInfo.sort((a, b) => compareHashes(a.hash, b.hash));\n return hashInfo;\n};\n\n/**\n * Reads hash file from buffer and returns it in ready-to-use form\n * @param hashFile - bufer containing hash file\n * @returns Array containing file info\n */\nconst parseHashFile = (hashFile: ArrayBuffer): HashElement[] => {\n const 
hashFileBuffer = Buffer.from(hashFile);\n const hashArray: HashElement[] = [];\n for (let i = 0; i < hashFileBuffer.buffer.byteLength; i = i + 24) {\n const offsetBuffer = new DataView(\n hashFileBuffer.buffer.slice(\n hashFileBuffer.byteOffset + i + 16,\n hashFileBuffer.byteOffset + i + 24\n )\n );\n const offset = offsetBuffer.getBigUint64(offsetBuffer.byteOffset, true);\n hashArray.push({\n hash: Buffer.from(\n hashFileBuffer.subarray(hashFileBuffer.byteOffset + i, hashFileBuffer.byteOffset + i + 16)\n ),\n offset\n });\n }\n return hashArray;\n};\n"],"mappings":"AAAA,OAAOA,GAAG,MAAM,KAAK;AACrB,SAAQC,oBAAoB,EAAEC,SAAS,IAAIC,iBAAiB,QAAO,6BAA6B;AAChG,SAAQC,eAAe,QAAO,uCAAuC;AAErE,SAAQC,uBAAuB,QAAO,gCAAgC;AACtE,SAAQC,gBAAgB,QAAO,kCAAkC;AACjE,SAAqBC,WAAW,EAAEC,aAAa,QAAO,iBAAiB;AAQvE,OAAO,MAAMC,SAAS,GAAG,MAAAA,CACvBC,YAA0B,EAC1BC,EAA0B,KACD;EACzB,MAAMC,YAAY,GAAG,MAAMN,gBAAgB,CAACI,YAAY,EAAEP,iBAAiB,CAAC;EAE5E,MAAMU,YAAY,GAAG,MAAMZ,oBAAoB,CAACW,YAAY,EAAEF,YAAY,CAAC;EAE3E,IAAII,QAAuB;EAC3B,IAAI,CAAAD,YAAY,aAAZA,YAAY,uBAAZA,YAAY,CAAEE,QAAQ,MAAK,2BAA2B,EAAE;IAC1DJ,EAAE,aAAFA,EAAE,uBAAFA,EAAE,CAAG,+BAA+B,CAAC;IACrCG,QAAQ,GAAG,MAAME,gBAAgB,CAACN,YAAY,CAAC;IAC/CC,EAAE,aAAFA,EAAE,uBAAFA,EAAE,CAAG,oEAAoE,CAAC;EAC5E,CAAC,MAAM;IACLA,EAAE,aAAFA,EAAE,uBAAFA,EAAE,CAAG,yBAAyB,CAAC;IAC/B,MAAMM,eAAe,GAAG,MAAMZ,uBAAuB,CACnDQ,YAAY,CAACK,iBAAiB,EAC9BR,YACF,CAAC;IACD,IAAI,CAACO,eAAe,EAAE;MACpB,MAAM,IAAIE,KAAK,CAAC,gBAAgB,CAAC;IACnC;IAEA,MAAMC,cAAc,GAAGH,eAAe,CAACG,cAAc;IACrD,MAAMC,QAAQ,GAAG,MAAMX,YAAY,CAACY,KAAK,CACvCF,cAAc,EACdA,cAAc,GAAGH,eAAe,CAACM,cACnC,CAAC;IAEDT,QAAQ,GAAGU,aAAa,CAACH,QAAQ,CAAC;EACpC;EAEA,OAAO,IAAId,WAAW,CAACG,YAAY,EAAEI,QAAQ,CAAC;AAChD,CAAC;AAOD,MAAME,gBAAgB,GAAG,MAAON,YAA0B,IAA6B;EACrF,MAAM;IAACe;EAAa,CAAC,GAAG,MAAMrB,eAAe,CAACM,YAAY,CAAC;EAC3D,IAAIgB,QAAQ,GAAG,MAAMzB,oBAAoB,CAACwB,aAAa,EAAEf,YAAY,CAAC;EACtE,MAAMiB,QAAuB,GAAG,EAAE;EAClC,OAAOD,QAAQ,EAAE;IACfC,QAAQ,CAACC,IAAI,CAAC;MACZC,IAAI,EAAEC,MAAM,CAACC,IAAI,CAAC/B,GAAG,CAAC0B,QAAQ,CAACX,QAAQ,CAACiB,KAAK,CAAC,IAAI,CAAC,CAACC,IAAI,CAAC,GAAG,CAAC,CAACC,iBAAiB,CAAC,CAAC,CAAC,EAAE,KAAK,CAAC;MAC1FC,MAAM,EAAET,QAAQ,CAACR;IACnB,CAAC,CAAC;IACFQ,QAAQ,GAAG,MAAMzB,oBAAoB,CACnCyB,QAAQ,CAACU,WAAW,GAAGC,MAAM,CAACX,QAAQ,CAACY,gBAAgB,CAAC,EACxD5B,YACF,CAAC;EACH;EACAiB,QAAQ,CAACY,IAAI,CAAC,CAACC,CAAC,EAAEC,CAAC,KAAKjC,aAAa,CAACgC,CAAC,CAACX,IAAI,EAAEY,CAAC,CAACZ,IAAI,CAAC,CAAC;EACtD,OAAOF,QAAQ;AACjB,CAAC;AAOD,MAAMH,aAAa,GAAIH,QAAqB,IAAoB;EAC9D,MAAMqB,cAAc,GAAGZ,MAAM,CAACC,IAAI,CAACV,QAAQ,CAAC;EAC5C,MAAMsB,SAAwB,GAAG,EAAE;EACnC,KAAK,IAAIC,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGF,cAAc,CAACG,MAAM,CAACC,UAAU,EAAEF,CAAC,GAAGA,CAAC,GAAG,EAAE,EAAE;IAChE,MAAMG,YAAY,GAAG,IAAIC,QAAQ,CAC/BN,cAAc,CAACG,MAAM,CAACvB,KAAK,CACzBoB,cAAc,CAACO,UAAU,GAAGL,CAAC,GAAG,EAAE,EAClCF,cAAc,CAACO,UAAU,GAAGL,CAAC,GAAG,EAClC,CACF,CAAC;IACD,MAAMT,MAAM,GAAGY,YAAY,CAACG,YAAY,CAACH,YAAY,CAACE,UAAU,EAAE,IAAI,CAAC;IACvEN,SAAS,CAACf,IAAI,CAAC;MACbC,IAAI,EAAEC,MAAM,CAACC,IAAI,CACfW,cAAc,CAACS,QAAQ,CAACT,cAAc,CAACO,UAAU,GAAGL,CAAC,EAAEF,cAAc,CAACO,UAAU,GAAGL,CAAC,GAAG,EAAE,CAC3F,CAAC;MACDT;IACF,CAAC,CAAC;EACJ;EACA,OAAOQ,SAAS;AAClB,CAAC"}
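parse-slpk now builds an SLPKArchive from any FileProvider and accepts an optional message callback; when the archive has no '@specialIndexFileHASH128@' index it composes the hash table from the central directory instead. A sketch of direct use; DataViewFileProvider is an internal module in this version, so its import path is an assumption:

```ts
import {parseSLPK} from '@loaders.gl/i3s';
// Internal in 4.0.0-alpha.16; illustrative import path.
import {DataViewFileProvider} from '@loaders.gl/i3s/dist/lib/parsers/parse-zip/data-view-file-provider';

async function openSceneLayer(slpkData: ArrayBuffer) {
  const provider = new DataViewFileProvider(new DataView(slpkData));
  // The callback only receives progress messages while the hash index is located or rebuilt.
  const archive = await parseSLPK(provider, (msg) => console.log(msg));
  // 'http' mode maps the logical path to the real (usually gzipped) entry and decompresses it.
  const sceneLayer = await archive.getFile('', 'http');
  return JSON.parse(sceneLayer.toString());
}
```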
package/dist/esm/lib/parsers/parse-slpk/slpk-archieve.js
CHANGED
@@ -1,8 +1,18 @@
 import _defineProperty from "@babel/runtime/helpers/esm/defineProperty";
 import md5 from 'md5';
 import { parseZipLocalFileHeader } from '../parse-zip/local-file-header';
-import { DataViewFileProvider } from '../parse-zip/buffer-file-provider';
 import { GZipCompression } from '@loaders.gl/compression';
+export const compareHashes = (hash1, hash2) => {
+  const h1 = new BigUint64Array(hash1.buffer, hash1.byteOffset, 2);
+  const h2 = new BigUint64Array(hash2.buffer, hash2.byteOffset, 2);
+  const diff = h1[0] === h2[0] ? h1[1] - h2[1] : h1[0] - h2[0];
+  if (diff < 0n) {
+    return -1;
+  } else if (diff === 0n) {
+    return 0;
+  }
+  return 1;
+};
 const PATH_DESCRIPTIONS = [{
   test: /^$/,
   extensions: ['3dSceneLayer.json.gz']
@@ -10,7 +20,7 @@ const PATH_DESCRIPTIONS = [{
   test: /^nodepages\/\d+$/,
   extensions: ['.json.gz']
 }, {
-  test: /^nodes
+  test: /^nodes\/(\d+|root)$/,
   extensions: ['/3dNodeIndexDocument.json.gz']
 }, {
   test: /^nodes\/\d+\/textures\/.+$/,
@@ -29,24 +39,27 @@ const PATH_DESCRIPTIONS = [{
   extensions: ['/sharedResource.json.gz']
 }];
 export class SLPKArchive {
-  constructor(
+  constructor(slpkArchive, hashFile) {
     _defineProperty(this, "slpkArchive", void 0);
     _defineProperty(this, "hashArray", void 0);
-    this
-
-
-
-
-
-
-
-
-
-
-
-
-
+    _defineProperty(this, "findBin", hashToSearch => {
+      let lowerBorder = 0;
+      let upperBorder = this.hashArray.length;
+      while (upperBorder - lowerBorder > 1) {
+        const middle = lowerBorder + Math.floor((upperBorder - lowerBorder) / 2);
+        const value = compareHashes(this.hashArray[middle].hash, hashToSearch);
+        if (value === 0) {
+          return this.hashArray[middle];
+        } else if (value < 0) {
+          lowerBorder = middle;
+        } else {
+          upperBorder = middle;
+        }
+      }
+      return undefined;
+    });
+    this.slpkArchive = slpkArchive;
+    this.hashArray = hashFile;
   }
   async getFile(path) {
     let mode = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'raw';
@@ -79,7 +92,10 @@ export class SLPKArchive {
     throw new Error('No such file in the archieve');
   }
   async getDataByPath(path) {
-
+    let data = await this.getFileBytes(path.toLocaleLowerCase());
+    if (!data) {
+      data = await this.getFileBytes(path);
+    }
     if (!data) {
       return undefined;
     }
@@ -92,15 +108,15 @@ export class SLPKArchive {
   }
   async getFileBytes(path) {
     const nameHash = Buffer.from(md5(path), 'hex');
-    const fileInfo = this.
+    const fileInfo = this.findBin(nameHash);
     if (!fileInfo) {
       return undefined;
     }
-    const localFileHeader = await parseZipLocalFileHeader(
+    const localFileHeader = await parseZipLocalFileHeader(fileInfo.offset, this.slpkArchive);
     if (!localFileHeader) {
       return undefined;
     }
-    const compressedFile = this.slpkArchive.
+    const compressedFile = this.slpkArchive.slice(localFileHeader.fileDataOffset, localFileHeader.fileDataOffset + localFileHeader.compressedSize);
     return compressedFile;
   }
 }
package/dist/esm/lib/parsers/parse-slpk/slpk-archieve.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"slpk-archieve.js","names":["md5","parseZipLocalFileHeader","DataViewFileProvider","GZipCompression","PATH_DESCRIPTIONS","test","extensions","SLPKArchive","constructor","slpkArchiveBuffer","hashFile","_defineProperty","slpkArchive","DataView","hashArray","parseHashFile","hashFileBuffer","Buffer","from","i","buffer","byteLength","offsetBuffer","slice","byteOffset","offset","getUint32","push","hash","subarray","getFile","path","mode","arguments","length","undefined","_PATH_DESCRIPTIONS$fi","find","val","data","ext","getDataByPath","concat","decompressedFile","fileWithoutCompression","getFileBytes","Error","compression","decompressedData","decompress","nameHash","fileInfo","compare","localFileHeader","compressedFile","fileDataOffset","compressedSize"],"sources":["../../../../../src/lib/parsers/parse-slpk/slpk-archieve.ts"],"sourcesContent":["import md5 from 'md5';\nimport {parseZipLocalFileHeader} from '../parse-zip/local-file-header';\nimport {DataViewFileProvider} from '../parse-zip/buffer-file-provider';\nimport {GZipCompression} from '@loaders.gl/compression';\n\n/** Element of hash array */\ntype HashElement = {\n /**\n * File name hash\n */\n hash: Buffer;\n /**\n * File offset in the archive\n */\n offset: number;\n};\n\n/** Description of real paths for different file types */\nconst PATH_DESCRIPTIONS: {test: RegExp; extensions: string[]}[] = [\n {\n test: /^$/,\n extensions: ['3dSceneLayer.json.gz']\n },\n {\n test: /^nodepages\\/\\d+$/,\n extensions: ['.json.gz']\n },\n {\n test: /^nodes\\/\\d+$/,\n extensions: ['/3dNodeIndexDocument.json.gz']\n },\n {\n test: /^nodes\\/\\d+\\/textures\\/.+$/,\n extensions: ['.jpg', '.png', '.bin.dds.gz', '.ktx']\n },\n {\n test: /^nodes\\/\\d+\\/geometries\\/\\d+$/,\n extensions: ['.bin.gz', '.draco.gz']\n },\n {\n test: /^nodes\\/\\d+\\/attributes\\/f_\\d+\\/\\d+$/,\n extensions: ['.bin.gz']\n },\n {\n test: /^statistics\\/f_\\d+\\/\\d+$/,\n extensions: ['.json.gz']\n },\n {\n test: /^nodes\\/\\d+\\/shared$/,\n extensions: ['/sharedResource.json.gz']\n }\n];\n\n/**\n * Class for handling information about slpk file\n */\nexport class SLPKArchive {\n slpkArchive: DataView;\n hashArray: {hash: Buffer; offset: number}[];\n constructor(slpkArchiveBuffer: ArrayBuffer, hashFile: ArrayBuffer) {\n this.slpkArchive = new DataView(slpkArchiveBuffer);\n this.hashArray = this.parseHashFile(hashFile);\n }\n\n /**\n * Reads hash file from buffer and returns it in ready-to-use form\n * @param hashFile - bufer containing hash file\n * @returns Array containing file info\n */\n private parseHashFile(hashFile: ArrayBuffer): HashElement[] {\n const hashFileBuffer = Buffer.from(hashFile);\n const hashArray: HashElement[] = [];\n for (let i = 0; i < hashFileBuffer.buffer.byteLength; i = i + 24) {\n const offsetBuffer = new DataView(\n hashFileBuffer.buffer.slice(\n hashFileBuffer.byteOffset + i + 16,\n hashFileBuffer.byteOffset + i + 24\n )\n );\n const offset = offsetBuffer.getUint32(offsetBuffer.byteOffset, true);\n hashArray.push({\n hash: Buffer.from(\n hashFileBuffer.subarray(hashFileBuffer.byteOffset + i, hashFileBuffer.byteOffset + i + 16)\n ),\n offset\n });\n }\n return hashArray;\n }\n\n /**\n * Returns file with the given path from slpk archive\n * @param path - path inside the slpk\n * @param mode - currently only raw mode supported\n * @returns buffer with ready to use file\n */\n async getFile(path: string, mode: 'http' | 'raw' = 'raw'): Promise<Buffer> {\n if (mode === 'http') {\n const extensions = PATH_DESCRIPTIONS.find((val) => 
val.test.test(path))?.extensions;\n if (extensions) {\n let data: ArrayBuffer | undefined;\n for (const ext of extensions) {\n data = await this.getDataByPath(`${path}${ext}`);\n if (data) {\n break;\n }\n }\n if (data) {\n return Buffer.from(data);\n }\n }\n }\n if (mode === 'raw') {\n const decompressedFile = await this.getDataByPath(`${path}.gz`);\n if (decompressedFile) {\n return Buffer.from(decompressedFile);\n }\n const fileWithoutCompression = await this.getFileBytes(path);\n if (fileWithoutCompression) {\n return Buffer.from(fileWithoutCompression);\n }\n }\n\n throw new Error('No such file in the archieve');\n }\n\n /**\n * returning uncompressed data for paths that ends with .gz and raw data for all other paths\n * @param path - path inside the archive\n * @returns buffer with the file data\n */\n private async getDataByPath(path: string): Promise<ArrayBuffer | undefined> {\n const data = await this.getFileBytes(path);\n if (!data) {\n return undefined;\n }\n if (/\\.gz$/.test(path)) {\n const compression = new GZipCompression();\n\n const decompressedData = await compression.decompress(data);\n return decompressedData;\n }\n return Buffer.from(data);\n }\n\n /**\n * Trying to get raw file data by adress\n * @param path - path inside the archive\n * @returns buffer with the raw file data\n */\n private async getFileBytes(path: string): Promise<ArrayBuffer | undefined> {\n const nameHash = Buffer.from(md5(path), 'hex');\n const fileInfo = this.hashArray.find((val) => Buffer.compare(val.hash, nameHash) === 0);\n if (!fileInfo) {\n return undefined;\n }\n\n const localFileHeader = await parseZipLocalFileHeader(\n this.slpkArchive.byteOffset + fileInfo?.offset,\n new DataViewFileProvider(this.slpkArchive)\n );\n if (!localFileHeader) {\n return undefined;\n }\n\n const compressedFile = this.slpkArchive.buffer.slice(\n localFileHeader.fileDataOffset,\n localFileHeader.fileDataOffset + localFileHeader.compressedSize\n );\n\n return compressedFile;\n 
}\n}\n"],"mappings":";AAAA,OAAOA,GAAG,MAAM,KAAK;AACrB,SAAQC,uBAAuB,QAAO,gCAAgC;AACtE,SAAQC,oBAAoB,QAAO,mCAAmC;AACtE,SAAQC,eAAe,QAAO,yBAAyB;AAevD,MAAMC,iBAAyD,GAAG,CAChE;EACEC,IAAI,EAAE,IAAI;EACVC,UAAU,EAAE,CAAC,sBAAsB;AACrC,CAAC,EACD;EACED,IAAI,EAAE,kBAAkB;EACxBC,UAAU,EAAE,CAAC,UAAU;AACzB,CAAC,EACD;EACED,IAAI,EAAE,cAAc;EACpBC,UAAU,EAAE,CAAC,8BAA8B;AAC7C,CAAC,EACD;EACED,IAAI,EAAE,4BAA4B;EAClCC,UAAU,EAAE,CAAC,MAAM,EAAE,MAAM,EAAE,aAAa,EAAE,MAAM;AACpD,CAAC,EACD;EACED,IAAI,EAAE,+BAA+B;EACrCC,UAAU,EAAE,CAAC,SAAS,EAAE,WAAW;AACrC,CAAC,EACD;EACED,IAAI,EAAE,sCAAsC;EAC5CC,UAAU,EAAE,CAAC,SAAS;AACxB,CAAC,EACD;EACED,IAAI,EAAE,0BAA0B;EAChCC,UAAU,EAAE,CAAC,UAAU;AACzB,CAAC,EACD;EACED,IAAI,EAAE,sBAAsB;EAC5BC,UAAU,EAAE,CAAC,yBAAyB;AACxC,CAAC,CACF;AAKD,OAAO,MAAMC,WAAW,CAAC;EAGvBC,WAAWA,CAACC,iBAA8B,EAAEC,QAAqB,EAAE;IAAAC,eAAA;IAAAA,eAAA;IACjE,IAAI,CAACC,WAAW,GAAG,IAAIC,QAAQ,CAACJ,iBAAiB,CAAC;IAClD,IAAI,CAACK,SAAS,GAAG,IAAI,CAACC,aAAa,CAACL,QAAQ,CAAC;EAC/C;EAOQK,aAAaA,CAACL,QAAqB,EAAiB;IAC1D,MAAMM,cAAc,GAAGC,MAAM,CAACC,IAAI,CAACR,QAAQ,CAAC;IAC5C,MAAMI,SAAwB,GAAG,EAAE;IACnC,KAAK,IAAIK,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGH,cAAc,CAACI,MAAM,CAACC,UAAU,EAAEF,CAAC,GAAGA,CAAC,GAAG,EAAE,EAAE;MAChE,MAAMG,YAAY,GAAG,IAAIT,QAAQ,CAC/BG,cAAc,CAACI,MAAM,CAACG,KAAK,CACzBP,cAAc,CAACQ,UAAU,GAAGL,CAAC,GAAG,EAAE,EAClCH,cAAc,CAACQ,UAAU,GAAGL,CAAC,GAAG,EAClC,CACF,CAAC;MACD,MAAMM,MAAM,GAAGH,YAAY,CAACI,SAAS,CAACJ,YAAY,CAACE,UAAU,EAAE,IAAI,CAAC;MACpEV,SAAS,CAACa,IAAI,CAAC;QACbC,IAAI,EAAEX,MAAM,CAACC,IAAI,CACfF,cAAc,CAACa,QAAQ,CAACb,cAAc,CAACQ,UAAU,GAAGL,CAAC,EAAEH,cAAc,CAACQ,UAAU,GAAGL,CAAC,GAAG,EAAE,CAC3F,CAAC;QACDM;MACF,CAAC,CAAC;IACJ;IACA,OAAOX,SAAS;EAClB;EAQA,MAAMgB,OAAOA,CAACC,IAAY,EAAiD;IAAA,IAA/CC,IAAoB,GAAAC,SAAA,CAAAC,MAAA,QAAAD,SAAA,QAAAE,SAAA,GAAAF,SAAA,MAAG,KAAK;IACtD,IAAID,IAAI,KAAK,MAAM,EAAE;MAAA,IAAAI,qBAAA;MACnB,MAAM9B,UAAU,IAAA8B,qBAAA,GAAGhC,iBAAiB,CAACiC,IAAI,CAAEC,GAAG,IAAKA,GAAG,CAACjC,IAAI,CAACA,IAAI,CAAC0B,IAAI,CAAC,CAAC,cAAAK,qBAAA,uBAApDA,qBAAA,CAAsD9B,UAAU;MACnF,IAAIA,UAAU,EAAE;QACd,IAAIiC,IAA6B;QACjC,KAAK,MAAMC,GAAG,IAAIlC,UAAU,EAAE;UAC5BiC,IAAI,GAAG,MAAM,IAAI,CAACE,aAAa,IAAAC,MAAA,CAAIX,IAAI,EAAAW,MAAA,CAAGF,GAAG,CAAE,CAAC;UAChD,IAAID,IAAI,EAAE;YACR;UACF;QACF;QACA,IAAIA,IAAI,EAAE;UACR,OAAOtB,MAAM,CAACC,IAAI,CAACqB,IAAI,CAAC;QAC1B;MACF;IACF;IACA,IAAIP,IAAI,KAAK,KAAK,EAAE;MAClB,MAAMW,gBAAgB,GAAG,MAAM,IAAI,CAACF,aAAa,IAAAC,MAAA,CAAIX,IAAI,QAAK,CAAC;MAC/D,IAAIY,gBAAgB,EAAE;QACpB,OAAO1B,MAAM,CAACC,IAAI,CAACyB,gBAAgB,CAAC;MACtC;MACA,MAAMC,sBAAsB,GAAG,MAAM,IAAI,CAACC,YAAY,CAACd,IAAI,CAAC;MAC5D,IAAIa,sBAAsB,EAAE;QAC1B,OAAO3B,MAAM,CAACC,IAAI,CAAC0B,sBAAsB,CAAC;MAC5C;IACF;IAEA,MAAM,IAAIE,KAAK,CAAC,8BAA8B,CAAC;EACjD;EAOA,MAAcL,aAAaA,CAACV,IAAY,EAAoC;IAC1E,MAAMQ,IAAI,GAAG,MAAM,IAAI,CAACM,YAAY,CAACd,IAAI,CAAC;IAC1C,IAAI,CAACQ,IAAI,EAAE;MACT,OAAOJ,SAAS;IAClB;IACA,IAAI,OAAO,CAAC9B,IAAI,CAAC0B,IAAI,CAAC,EAAE;MACtB,MAAMgB,WAAW,GAAG,IAAI5C,eAAe,CAAC,CAAC;MAEzC,MAAM6C,gBAAgB,GAAG,MAAMD,WAAW,CAACE,UAAU,CAACV,IAAI,CAAC;MAC3D,OAAOS,gBAAgB;IACzB;IACA,OAAO/B,MAAM,CAACC,IAAI,CAACqB,IAAI,CAAC;EAC1B;EAOA,MAAcM,YAAYA,CAACd,IAAY,EAAoC;IACzE,MAAMmB,QAAQ,GAAGjC,MAAM,CAACC,IAAI,CAAClB,GAAG,CAAC+B,IAAI,CAAC,EAAE,KAAK,CAAC;IAC9C,MAAMoB,QAAQ,GAAG,IAAI,CAACrC,SAAS,CAACuB,IAAI,CAAEC,GAAG,IAAKrB,MAAM,CAACmC,OAAO,CAACd,GAAG,CAACV,IAAI,EAAEsB,QAAQ,CAAC,KAAK,CAAC,CAAC;IACvF,IAAI,CAACC,QAAQ,EAAE;MACb,OAAOhB,SAAS;IAClB;IAEA,MAAMkB,eAAe,GAAG,MAAMpD,uBAAuB,CACnD,IAAI,CAACW,WAAW,CAACY,UAAU,IAAG2B,QAAQ,aAARA,QAAQ,uBAARA,QAAQ,CAAE1B,MAAM,GAC9C,IAAIvB,oBAAoB,CAAC,IAAI,CAACU,WAAW,CAC3C,CAAC;IACD,IAAI,CAACyC,eAAe,EAAE;MACpB,OAAOlB,SAAS;IAClB;IAEA,MAAMmB,cAAc,GAAG,
IAAI,CAAC1C,WAAW,CAACQ,MAAM,CAACG,KAAK,CAClD8B,eAAe,CAACE,cAAc,EAC9BF,eAAe,CAACE,cAAc,GAAGF,eAAe,CAACG,cACnD,CAAC;IAED,OAAOF,cAAc;EACvB;AACF"}
+
{"version":3,"file":"slpk-archieve.js","names":["md5","parseZipLocalFileHeader","GZipCompression","compareHashes","hash1","hash2","h1","BigUint64Array","buffer","byteOffset","h2","diff","PATH_DESCRIPTIONS","test","extensions","SLPKArchive","constructor","slpkArchive","hashFile","_defineProperty","hashToSearch","lowerBorder","upperBorder","hashArray","length","middle","Math","floor","value","hash","undefined","getFile","path","mode","arguments","_PATH_DESCRIPTIONS$fi","find","val","data","ext","getDataByPath","concat","Buffer","from","decompressedFile","fileWithoutCompression","getFileBytes","Error","toLocaleLowerCase","compression","decompressedData","decompress","nameHash","fileInfo","findBin","localFileHeader","offset","compressedFile","slice","fileDataOffset","compressedSize"],"sources":["../../../../../src/lib/parsers/parse-slpk/slpk-archieve.ts"],"sourcesContent":["import md5 from 'md5';\nimport {parseZipLocalFileHeader} from '../parse-zip/local-file-header';\nimport {GZipCompression} from '@loaders.gl/compression';\nimport {FileProvider} from '../parse-zip/file-provider';\n\n/** Element of hash array */\nexport type HashElement = {\n /** File name hash */\n hash: Buffer;\n /** File offset in the archive */\n offset: bigint;\n};\n\n/**\n * Comparing md5 hashes according to https://github.com/Esri/i3s-spec/blob/master/docs/2.0/slpk_hashtable.pcsl.md step 5\n * @param hash1 hash to compare\n * @param hash2 hash to compare\n * @returns -1 if hash1 < hash2, 0 of hash1 === hash2, 1 if hash1 > hash2\n */\nexport const compareHashes = (hash1: Buffer, hash2: Buffer): number => {\n const h1 = new BigUint64Array(hash1.buffer, hash1.byteOffset, 2);\n const h2 = new BigUint64Array(hash2.buffer, hash2.byteOffset, 2);\n\n const diff = h1[0] === h2[0] ? h1[1] - h2[1] : h1[0] - h2[0];\n\n if (diff < 0n) {\n return -1;\n } else if (diff === 0n) {\n return 0;\n }\n return 1;\n};\n\n/** Description of real paths for different file types */\nconst PATH_DESCRIPTIONS: {test: RegExp; extensions: string[]}[] = [\n {\n test: /^$/,\n extensions: ['3dSceneLayer.json.gz']\n },\n {\n test: /^nodepages\\/\\d+$/,\n extensions: ['.json.gz']\n },\n {\n test: /^nodes\\/(\\d+|root)$/,\n extensions: ['/3dNodeIndexDocument.json.gz']\n },\n {\n test: /^nodes\\/\\d+\\/textures\\/.+$/,\n extensions: ['.jpg', '.png', '.bin.dds.gz', '.ktx']\n },\n {\n test: /^nodes\\/\\d+\\/geometries\\/\\d+$/,\n extensions: ['.bin.gz', '.draco.gz']\n },\n {\n test: /^nodes\\/\\d+\\/attributes\\/f_\\d+\\/\\d+$/,\n extensions: ['.bin.gz']\n },\n {\n test: /^statistics\\/f_\\d+\\/\\d+$/,\n extensions: ['.json.gz']\n },\n {\n test: /^nodes\\/\\d+\\/shared$/,\n extensions: ['/sharedResource.json.gz']\n }\n];\n\n/**\n * Class for handling information about slpk file\n */\nexport class SLPKArchive {\n private slpkArchive: FileProvider;\n private hashArray: HashElement[];\n constructor(slpkArchive: FileProvider, hashFile: HashElement[]) {\n this.slpkArchive = slpkArchive;\n this.hashArray = hashFile;\n }\n\n /**\n * Binary search in the hash info\n * @param hashToSearch hash that we need to find\n * @returns required hash element or undefined if not found\n */\n private findBin = (hashToSearch: Buffer): HashElement | undefined => {\n let lowerBorder = 0;\n let upperBorder = this.hashArray.length;\n\n while (upperBorder - lowerBorder > 1) {\n const middle = lowerBorder + Math.floor((upperBorder - lowerBorder) / 2);\n const value = compareHashes(this.hashArray[middle].hash, hashToSearch);\n if (value === 0) {\n return this.hashArray[middle];\n } else 
if (value < 0) {\n lowerBorder = middle;\n } else {\n upperBorder = middle;\n }\n }\n return undefined;\n };\n\n /**\n * Returns file with the given path from slpk archive\n * @param path - path inside the slpk\n * @param mode - currently only raw mode supported\n * @returns buffer with ready to use file\n */\n async getFile(path: string, mode: 'http' | 'raw' = 'raw'): Promise<Buffer> {\n if (mode === 'http') {\n const extensions = PATH_DESCRIPTIONS.find((val) => val.test.test(path))?.extensions;\n if (extensions) {\n let data: ArrayBuffer | undefined;\n for (const ext of extensions) {\n data = await this.getDataByPath(`${path}${ext}`);\n if (data) {\n break;\n }\n }\n if (data) {\n return Buffer.from(data);\n }\n }\n }\n if (mode === 'raw') {\n const decompressedFile = await this.getDataByPath(`${path}.gz`);\n if (decompressedFile) {\n return Buffer.from(decompressedFile);\n }\n const fileWithoutCompression = await this.getFileBytes(path);\n if (fileWithoutCompression) {\n return Buffer.from(fileWithoutCompression);\n }\n }\n\n throw new Error('No such file in the archieve');\n }\n\n /**\n * returning uncompressed data for paths that ends with .gz and raw data for all other paths\n * @param path - path inside the archive\n * @returns buffer with the file data\n */\n private async getDataByPath(path: string): Promise<ArrayBuffer | undefined> {\n // sometimes paths are not in lower case when hash file is created,\n // so first we're looking for lower case file name and then for original one\n let data = await this.getFileBytes(path.toLocaleLowerCase());\n if (!data) {\n data = await this.getFileBytes(path);\n }\n if (!data) {\n return undefined;\n }\n if (/\\.gz$/.test(path)) {\n const compression = new GZipCompression();\n\n const decompressedData = await compression.decompress(data);\n return decompressedData;\n }\n return Buffer.from(data);\n }\n\n /**\n * Trying to get raw file data by adress\n * @param path - path inside the archive\n * @returns buffer with the raw file data\n */\n private async getFileBytes(path: string): Promise<ArrayBuffer | undefined> {\n const nameHash = Buffer.from(md5(path), 'hex');\n const fileInfo = this.findBin(nameHash); // implement binary search\n if (!fileInfo) {\n return undefined;\n }\n\n const localFileHeader = await parseZipLocalFileHeader(fileInfo.offset, this.slpkArchive);\n if (!localFileHeader) {\n return undefined;\n }\n\n const compressedFile = this.slpkArchive.slice(\n localFileHeader.fileDataOffset,\n localFileHeader.fileDataOffset + localFileHeader.compressedSize\n );\n\n return compressedFile;\n 
}\n}\n"],"mappings":";AAAA,OAAOA,GAAG,MAAM,KAAK;AACrB,SAAQC,uBAAuB,QAAO,gCAAgC;AACtE,SAAQC,eAAe,QAAO,yBAAyB;AAiBvD,OAAO,MAAMC,aAAa,GAAGA,CAACC,KAAa,EAAEC,KAAa,KAAa;EACrE,MAAMC,EAAE,GAAG,IAAIC,cAAc,CAACH,KAAK,CAACI,MAAM,EAAEJ,KAAK,CAACK,UAAU,EAAE,CAAC,CAAC;EAChE,MAAMC,EAAE,GAAG,IAAIH,cAAc,CAACF,KAAK,CAACG,MAAM,EAAEH,KAAK,CAACI,UAAU,EAAE,CAAC,CAAC;EAEhE,MAAME,IAAI,GAAGL,EAAE,CAAC,CAAC,CAAC,KAAKI,EAAE,CAAC,CAAC,CAAC,GAAGJ,EAAE,CAAC,CAAC,CAAC,GAAGI,EAAE,CAAC,CAAC,CAAC,GAAGJ,EAAE,CAAC,CAAC,CAAC,GAAGI,EAAE,CAAC,CAAC,CAAC;EAE5D,IAAIC,IAAI,GAAG,EAAE,EAAE;IACb,OAAO,CAAC,CAAC;EACX,CAAC,MAAM,IAAIA,IAAI,KAAK,EAAE,EAAE;IACtB,OAAO,CAAC;EACV;EACA,OAAO,CAAC;AACV,CAAC;AAGD,MAAMC,iBAAyD,GAAG,CAChE;EACEC,IAAI,EAAE,IAAI;EACVC,UAAU,EAAE,CAAC,sBAAsB;AACrC,CAAC,EACD;EACED,IAAI,EAAE,kBAAkB;EACxBC,UAAU,EAAE,CAAC,UAAU;AACzB,CAAC,EACD;EACED,IAAI,EAAE,qBAAqB;EAC3BC,UAAU,EAAE,CAAC,8BAA8B;AAC7C,CAAC,EACD;EACED,IAAI,EAAE,4BAA4B;EAClCC,UAAU,EAAE,CAAC,MAAM,EAAE,MAAM,EAAE,aAAa,EAAE,MAAM;AACpD,CAAC,EACD;EACED,IAAI,EAAE,+BAA+B;EACrCC,UAAU,EAAE,CAAC,SAAS,EAAE,WAAW;AACrC,CAAC,EACD;EACED,IAAI,EAAE,sCAAsC;EAC5CC,UAAU,EAAE,CAAC,SAAS;AACxB,CAAC,EACD;EACED,IAAI,EAAE,0BAA0B;EAChCC,UAAU,EAAE,CAAC,UAAU;AACzB,CAAC,EACD;EACED,IAAI,EAAE,sBAAsB;EAC5BC,UAAU,EAAE,CAAC,yBAAyB;AACxC,CAAC,CACF;AAKD,OAAO,MAAMC,WAAW,CAAC;EAGvBC,WAAWA,CAACC,WAAyB,EAAEC,QAAuB,EAAE;IAAAC,eAAA;IAAAA,eAAA;IAAAA,eAAA,kBAU7CC,YAAoB,IAA8B;MACnE,IAAIC,WAAW,GAAG,CAAC;MACnB,IAAIC,WAAW,GAAG,IAAI,CAACC,SAAS,CAACC,MAAM;MAEvC,OAAOF,WAAW,GAAGD,WAAW,GAAG,CAAC,EAAE;QACpC,MAAMI,MAAM,GAAGJ,WAAW,GAAGK,IAAI,CAACC,KAAK,CAAC,CAACL,WAAW,GAAGD,WAAW,IAAI,CAAC,CAAC;QACxE,MAAMO,KAAK,GAAGzB,aAAa,CAAC,IAAI,CAACoB,SAAS,CAACE,MAAM,CAAC,CAACI,IAAI,EAAET,YAAY,CAAC;QACtE,IAAIQ,KAAK,KAAK,CAAC,EAAE;UACf,OAAO,IAAI,CAACL,SAAS,CAACE,MAAM,CAAC;QAC/B,CAAC,MAAM,IAAIG,KAAK,GAAG,CAAC,EAAE;UACpBP,WAAW,GAAGI,MAAM;QACtB,CAAC,MAAM;UACLH,WAAW,GAAGG,MAAM;QACtB;MACF;MACA,OAAOK,SAAS;IAClB,CAAC;IAzBC,IAAI,CAACb,WAAW,GAAGA,WAAW;IAC9B,IAAI,CAACM,SAAS,GAAGL,QAAQ;EAC3B;EA+BA,MAAMa,OAAOA,CAACC,IAAY,EAAiD;IAAA,IAA/CC,IAAoB,GAAAC,SAAA,CAAAV,MAAA,QAAAU,SAAA,QAAAJ,SAAA,GAAAI,SAAA,MAAG,KAAK;IACtD,IAAID,IAAI,KAAK,MAAM,EAAE;MAAA,IAAAE,qBAAA;MACnB,MAAMrB,UAAU,IAAAqB,qBAAA,GAAGvB,iBAAiB,CAACwB,IAAI,CAAEC,GAAG,IAAKA,GAAG,CAACxB,IAAI,CAACA,IAAI,CAACmB,IAAI,CAAC,CAAC,cAAAG,qBAAA,uBAApDA,qBAAA,CAAsDrB,UAAU;MACnF,IAAIA,UAAU,EAAE;QACd,IAAIwB,IAA6B;QACjC,KAAK,MAAMC,GAAG,IAAIzB,UAAU,EAAE;UAC5BwB,IAAI,GAAG,MAAM,IAAI,CAACE,aAAa,IAAAC,MAAA,CAAIT,IAAI,EAAAS,MAAA,CAAGF,GAAG,CAAE,CAAC;UAChD,IAAID,IAAI,EAAE;YACR;UACF;QACF;QACA,IAAIA,IAAI,EAAE;UACR,OAAOI,MAAM,CAACC,IAAI,CAACL,IAAI,CAAC;QAC1B;MACF;IACF;IACA,IAAIL,IAAI,KAAK,KAAK,EAAE;MAClB,MAAMW,gBAAgB,GAAG,MAAM,IAAI,CAACJ,aAAa,IAAAC,MAAA,CAAIT,IAAI,QAAK,CAAC;MAC/D,IAAIY,gBAAgB,EAAE;QACpB,OAAOF,MAAM,CAACC,IAAI,CAACC,gBAAgB,CAAC;MACtC;MACA,MAAMC,sBAAsB,GAAG,MAAM,IAAI,CAACC,YAAY,CAACd,IAAI,CAAC;MAC5D,IAAIa,sBAAsB,EAAE;QAC1B,OAAOH,MAAM,CAACC,IAAI,CAACE,sBAAsB,CAAC;MAC5C;IACF;IAEA,MAAM,IAAIE,KAAK,CAAC,8BAA8B,CAAC;EACjD;EAOA,MAAcP,aAAaA,CAACR,IAAY,EAAoC;IAG1E,IAAIM,IAAI,GAAG,MAAM,IAAI,CAACQ,YAAY,CAACd,IAAI,CAACgB,iBAAiB,CAAC,CAAC,CAAC;IAC5D,IAAI,CAACV,IAAI,EAAE;MACTA,IAAI,GAAG,MAAM,IAAI,CAACQ,YAAY,CAACd,IAAI,CAAC;IACtC;IACA,IAAI,CAACM,IAAI,EAAE;MACT,OAAOR,SAAS;IAClB;IACA,IAAI,OAAO,CAACjB,IAAI,CAACmB,IAAI,CAAC,EAAE;MACtB,MAAMiB,WAAW,GAAG,IAAI/C,eAAe,CAAC,CAAC;MAEzC,MAAMgD,gBAAgB,GAAG,MAAMD,WAAW,CAACE,UAAU,CAACb,IAAI,CAAC;MAC3D,OAAOY,gBAAgB;IACzB;IACA,OAAOR,MAAM,CAACC,IAAI,CAACL,IAAI,CAAC;EAC1B;EAOA,MAAcQ,YAAYA,CAACd,IAAY,EAAoC;IACzE,MAAMoB,QAAQ,GAAGV,MAAM,CAACC,IAAI,CAAC3C,GAAG,CAACgC,IAAI,CAAC,EAAE,KAAK
,CAAC;IAC9C,MAAMqB,QAAQ,GAAG,IAAI,CAACC,OAAO,CAACF,QAAQ,CAAC;IACvC,IAAI,CAACC,QAAQ,EAAE;MACb,OAAOvB,SAAS;IAClB;IAEA,MAAMyB,eAAe,GAAG,MAAMtD,uBAAuB,CAACoD,QAAQ,CAACG,MAAM,EAAE,IAAI,CAACvC,WAAW,CAAC;IACxF,IAAI,CAACsC,eAAe,EAAE;MACpB,OAAOzB,SAAS;IAClB;IAEA,MAAM2B,cAAc,GAAG,IAAI,CAACxC,WAAW,CAACyC,KAAK,CAC3CH,eAAe,CAACI,cAAc,EAC9BJ,eAAe,CAACI,cAAc,GAAGJ,eAAe,CAACK,cACnD,CAAC;IAED,OAAOH,cAAc;EACvB;AACF"}
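slpk-archieve now resolves files with a binary search (findBin) over the hash table, so the table must stay sorted with the exported compareHashes, which orders two 16-byte MD5 digests by their two 64-bit halves. A sketch of how path keys are produced and ordered; the example paths and the internal import path are illustrative:

```ts
import md5 from 'md5';
// Internal module (slpk-archieve); illustrative import path.
import {compareHashes} from '@loaders.gl/i3s/dist/lib/parsers/parse-slpk/slpk-archieve';

// Keys are the MD5 of the lower-cased, forward-slash path, stored as 16 raw bytes.
const pathKey = (path: string): Buffer =>
  Buffer.from(md5(path.split('\\').join('/').toLocaleLowerCase()), 'hex');

// compareHashes returns -1/0/1, so the same comparator drives sorting and binary search.
const table = [
  {hash: pathKey('nodes/0/geometries/0'), offset: 0n},
  {hash: pathKey('nodepages/0'), offset: 0n}
].sort((a, b) => compareHashes(a.hash, b.hash));
```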
@@ -1,33 +1,41 @@
|
|
|
1
|
+
const offsets = {
|
|
2
|
+
CD_COMPRESSED_SIZE_OFFSET: 20n,
|
|
3
|
+
CD_UNCOMPRESSED_SIZE_OFFSET: 24n,
|
|
4
|
+
CD_FILE_NAME_LENGTH_OFFSET: 28n,
|
|
5
|
+
CD_EXTRA_FIELD_LENGTH_OFFSET: 30n,
|
|
6
|
+
CD_LOCAL_HEADER_OFFSET_OFFSET: 42n,
|
|
7
|
+
CD_FILE_NAME_OFFSET: 46n
|
|
8
|
+
};
|
|
9
|
+
export const signature = [0x50, 0x4b, 0x01, 0x02];
|
|
1
10
|
export const parseZipCDFileHeader = async (headerOffset, buffer) => {
|
|
2
|
-
|
|
3
|
-
|
|
4
|
-
|
|
5
|
-
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
CD_FILE_NAME_OFFSET: 46
|
|
9
|
-
};
|
|
10
|
-
const compressedSize = await buffer.getUint32(headerOffset + offsets.CD_COMPRESSED_SIZE_OFFSET);
|
|
11
|
-
const uncompressedSize = await buffer.getUint32(headerOffset + offsets.CD_UNCOMPRESSED_SIZE_OFFSET);
|
|
11
|
+
if (Buffer.from(await buffer.slice(headerOffset, headerOffset + 4n)).compare(Buffer.from(signature)) !== 0) {
|
|
12
|
+
return null;
|
|
13
|
+
}
|
|
14
|
+
let compressedSize = BigInt(await buffer.getUint32(headerOffset + offsets.CD_COMPRESSED_SIZE_OFFSET));
|
|
15
|
+
let uncompressedSize = BigInt(await buffer.getUint32(headerOffset + offsets.CD_UNCOMPRESSED_SIZE_OFFSET));
|
|
16
|
+
const extraFieldLength = await buffer.getUint16(headerOffset + offsets.CD_EXTRA_FIELD_LENGTH_OFFSET);
|
|
12
17
|
const fileNameLength = await buffer.getUint16(headerOffset + offsets.CD_FILE_NAME_LENGTH_OFFSET);
|
|
13
|
-
const fileName = new TextDecoder().decode(await buffer.slice(headerOffset + offsets.CD_FILE_NAME_OFFSET, headerOffset + offsets.CD_FILE_NAME_OFFSET + fileNameLength));
|
|
14
|
-
const extraOffset = headerOffset + offsets.CD_FILE_NAME_OFFSET + fileNameLength;
|
|
18
|
+
const fileName = new TextDecoder().decode(await buffer.slice(headerOffset + offsets.CD_FILE_NAME_OFFSET, headerOffset + offsets.CD_FILE_NAME_OFFSET + BigInt(fileNameLength)));
|
|
19
|
+
const extraOffset = headerOffset + offsets.CD_FILE_NAME_OFFSET + BigInt(fileNameLength);
|
|
15
20
|
const oldFormatOffset = await buffer.getUint32(headerOffset + offsets.CD_LOCAL_HEADER_OFFSET_OFFSET);
|
|
16
|
-
let fileDataOffset = oldFormatOffset;
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
|
|
21
|
+
let fileDataOffset = BigInt(oldFormatOffset);
|
|
22
|
+
let offsetInZip64Data = 4n;
|
|
23
|
+
if (uncompressedSize === BigInt(0xffffffff)) {
|
|
24
|
+
uncompressedSize = await buffer.getBigUint64(extraOffset + offsetInZip64Data);
|
|
25
|
+
offsetInZip64Data += 8n;
|
|
26
|
+
}
|
|
27
|
+
if (compressedSize === BigInt(0xffffffff)) {
|
|
28
|
+
compressedSize = await buffer.getBigUint64(extraOffset + offsetInZip64Data);
|
|
29
|
+
offsetInZip64Data += 8n;
|
|
30
|
+
}
|
|
31
|
+
if (fileDataOffset === BigInt(0xffffffff)) {
|
|
32
|
+
fileDataOffset = await buffer.getBigUint64(extraOffset + offsetInZip64Data);
|
|
26
33
|
}
|
|
27
34
|
const localHeaderOffset = fileDataOffset;
|
|
28
35
|
return {
|
|
29
36
|
compressedSize,
|
|
30
37
|
uncompressedSize,
|
|
38
|
+
extraFieldLength,
|
|
31
39
|
fileNameLength,
|
|
32
40
|
fileName,
|
|
33
41
|
extraOffset,
|
|
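The rewritten cd-file-header.js shown above moves all sizes and offsets to bigint and falls back to the zip64 extra field whenever a 32-bit size or local-header offset is saturated at 0xffffffff. A minimal usage sketch in TypeScript, assuming the relative import paths and an existing FileProvider instance (the sketch itself is not part of the package contents in this diff):

// Sketch only: the import paths below are assumptions; FileProvider lives at
// src/lib/parsers/parse-zip/file-provider in this package.
import {FileProvider} from './file-provider';
import {parseZipCDFileHeader} from './cd-file-header';

async function readCDHeader(provider: FileProvider, cdHeaderOffset: bigint) {
  // The parser returns null when the 0x50 0x4b 0x01 0x02 signature is not
  // found at cdHeaderOffset, instead of throwing.
  const header = await parseZipCDFileHeader(cdHeaderOffset, provider);
  if (!header) {
    throw new Error('Not a central directory file header');
  }
  // compressedSize, uncompressedSize and localHeaderOffset are bigint and may
  // have been read from the zip64 extra field for archives over 4 GB.
  return header;
}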
@@ -1 +1 @@
-{"version":3,"file":"cd-file-header.js","names":["
+
{"version":3,"file":"cd-file-header.js","names":["offsets","CD_COMPRESSED_SIZE_OFFSET","CD_UNCOMPRESSED_SIZE_OFFSET","CD_FILE_NAME_LENGTH_OFFSET","CD_EXTRA_FIELD_LENGTH_OFFSET","CD_LOCAL_HEADER_OFFSET_OFFSET","CD_FILE_NAME_OFFSET","signature","parseZipCDFileHeader","headerOffset","buffer","Buffer","from","slice","compare","compressedSize","BigInt","getUint32","uncompressedSize","extraFieldLength","getUint16","fileNameLength","fileName","TextDecoder","decode","extraOffset","oldFormatOffset","fileDataOffset","offsetInZip64Data","getBigUint64","localHeaderOffset"],"sources":["../../../../../src/lib/parsers/parse-zip/cd-file-header.ts"],"sourcesContent":["import {FileProvider} from './file-provider';\nimport {ZipSignature} from './search-from-the-end';\n\n/**\n * zip central directory file header info\n * according to https://en.wikipedia.org/wiki/ZIP_(file_format)\n */\nexport type ZipCDFileHeader = {\n /** Compressed size */\n compressedSize: bigint;\n /** Uncompressed size */\n uncompressedSize: bigint;\n /** Extra field size */\n extraFieldLength: number;\n /** File name length */\n fileNameLength: number;\n /** File name */\n fileName: string;\n /** Extra field offset */\n extraOffset: bigint;\n /** Relative offset of local file header */\n localHeaderOffset: bigint;\n};\n\nconst offsets = {\n CD_COMPRESSED_SIZE_OFFSET: 20n,\n CD_UNCOMPRESSED_SIZE_OFFSET: 24n,\n CD_FILE_NAME_LENGTH_OFFSET: 28n,\n CD_EXTRA_FIELD_LENGTH_OFFSET: 30n,\n CD_LOCAL_HEADER_OFFSET_OFFSET: 42n,\n CD_FILE_NAME_OFFSET: 46n\n};\n\nexport const signature: ZipSignature = [0x50, 0x4b, 0x01, 0x02];\n\n/**\n * Parses central directory file header of zip file\n * @param headerOffset - offset in the archive where header starts\n * @param buffer - buffer containing whole array\n * @returns Info from the header\n */\nexport const parseZipCDFileHeader = async (\n headerOffset: bigint,\n buffer: FileProvider\n): Promise<ZipCDFileHeader | null> => {\n if (\n Buffer.from(await buffer.slice(headerOffset, headerOffset + 4n)).compare(\n Buffer.from(signature)\n ) !== 0\n ) {\n return null;\n }\n\n let compressedSize = BigInt(\n await buffer.getUint32(headerOffset + offsets.CD_COMPRESSED_SIZE_OFFSET)\n );\n\n let uncompressedSize = BigInt(\n await buffer.getUint32(headerOffset + offsets.CD_UNCOMPRESSED_SIZE_OFFSET)\n );\n\n const extraFieldLength = await buffer.getUint16(\n headerOffset + offsets.CD_EXTRA_FIELD_LENGTH_OFFSET\n );\n\n const fileNameLength = await buffer.getUint16(headerOffset + offsets.CD_FILE_NAME_LENGTH_OFFSET);\n\n const fileName = new TextDecoder().decode(\n await buffer.slice(\n headerOffset + offsets.CD_FILE_NAME_OFFSET,\n headerOffset + offsets.CD_FILE_NAME_OFFSET + BigInt(fileNameLength)\n )\n );\n\n const extraOffset = headerOffset + offsets.CD_FILE_NAME_OFFSET + BigInt(fileNameLength);\n\n const oldFormatOffset = await buffer.getUint32(\n headerOffset + offsets.CD_LOCAL_HEADER_OFFSET_OFFSET\n );\n\n let fileDataOffset = BigInt(oldFormatOffset);\n let offsetInZip64Data = 4n;\n // looking for info that might be also be in zip64 extra field\n if (uncompressedSize === BigInt(0xffffffff)) {\n uncompressedSize = await buffer.getBigUint64(extraOffset + offsetInZip64Data);\n offsetInZip64Data += 8n;\n }\n if (compressedSize === BigInt(0xffffffff)) {\n compressedSize = await buffer.getBigUint64(extraOffset + offsetInZip64Data);\n offsetInZip64Data += 8n;\n }\n if (fileDataOffset === BigInt(0xffffffff)) {\n fileDataOffset = await buffer.getBigUint64(extraOffset + offsetInZip64Data); // setting it to the one from 
zip64\n }\n const localHeaderOffset = fileDataOffset;\n\n return {\n compressedSize,\n uncompressedSize,\n extraFieldLength,\n fileNameLength,\n fileName,\n extraOffset,\n localHeaderOffset\n };\n};\n"],"mappings":"AAwBA,MAAMA,OAAO,GAAG;EACdC,yBAAyB,EAAE,GAAG;EAC9BC,2BAA2B,EAAE,GAAG;EAChCC,0BAA0B,EAAE,GAAG;EAC/BC,4BAA4B,EAAE,GAAG;EACjCC,6BAA6B,EAAE,GAAG;EAClCC,mBAAmB,EAAE;AACvB,CAAC;AAED,OAAO,MAAMC,SAAuB,GAAG,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC;AAQ/D,OAAO,MAAMC,oBAAoB,GAAG,MAAAA,CAClCC,YAAoB,EACpBC,MAAoB,KACgB;EACpC,IACEC,MAAM,CAACC,IAAI,CAAC,MAAMF,MAAM,CAACG,KAAK,CAACJ,YAAY,EAAEA,YAAY,GAAG,EAAE,CAAC,CAAC,CAACK,OAAO,CACtEH,MAAM,CAACC,IAAI,CAACL,SAAS,CACvB,CAAC,KAAK,CAAC,EACP;IACA,OAAO,IAAI;EACb;EAEA,IAAIQ,cAAc,GAAGC,MAAM,CACzB,MAAMN,MAAM,CAACO,SAAS,CAACR,YAAY,GAAGT,OAAO,CAACC,yBAAyB,CACzE,CAAC;EAED,IAAIiB,gBAAgB,GAAGF,MAAM,CAC3B,MAAMN,MAAM,CAACO,SAAS,CAACR,YAAY,GAAGT,OAAO,CAACE,2BAA2B,CAC3E,CAAC;EAED,MAAMiB,gBAAgB,GAAG,MAAMT,MAAM,CAACU,SAAS,CAC7CX,YAAY,GAAGT,OAAO,CAACI,4BACzB,CAAC;EAED,MAAMiB,cAAc,GAAG,MAAMX,MAAM,CAACU,SAAS,CAACX,YAAY,GAAGT,OAAO,CAACG,0BAA0B,CAAC;EAEhG,MAAMmB,QAAQ,GAAG,IAAIC,WAAW,CAAC,CAAC,CAACC,MAAM,CACvC,MAAMd,MAAM,CAACG,KAAK,CAChBJ,YAAY,GAAGT,OAAO,CAACM,mBAAmB,EAC1CG,YAAY,GAAGT,OAAO,CAACM,mBAAmB,GAAGU,MAAM,CAACK,cAAc,CACpE,CACF,CAAC;EAED,MAAMI,WAAW,GAAGhB,YAAY,GAAGT,OAAO,CAACM,mBAAmB,GAAGU,MAAM,CAACK,cAAc,CAAC;EAEvF,MAAMK,eAAe,GAAG,MAAMhB,MAAM,CAACO,SAAS,CAC5CR,YAAY,GAAGT,OAAO,CAACK,6BACzB,CAAC;EAED,IAAIsB,cAAc,GAAGX,MAAM,CAACU,eAAe,CAAC;EAC5C,IAAIE,iBAAiB,GAAG,EAAE;EAE1B,IAAIV,gBAAgB,KAAKF,MAAM,CAAC,UAAU,CAAC,EAAE;IAC3CE,gBAAgB,GAAG,MAAMR,MAAM,CAACmB,YAAY,CAACJ,WAAW,GAAGG,iBAAiB,CAAC;IAC7EA,iBAAiB,IAAI,EAAE;EACzB;EACA,IAAIb,cAAc,KAAKC,MAAM,CAAC,UAAU,CAAC,EAAE;IACzCD,cAAc,GAAG,MAAML,MAAM,CAACmB,YAAY,CAACJ,WAAW,GAAGG,iBAAiB,CAAC;IAC3EA,iBAAiB,IAAI,EAAE;EACzB;EACA,IAAID,cAAc,KAAKX,MAAM,CAAC,UAAU,CAAC,EAAE;IACzCW,cAAc,GAAG,MAAMjB,MAAM,CAACmB,YAAY,CAACJ,WAAW,GAAGG,iBAAiB,CAAC;EAC7E;EACA,MAAME,iBAAiB,GAAGH,cAAc;EAExC,OAAO;IACLZ,cAAc;IACdG,gBAAgB;IAChBC,gBAAgB;IAChBE,cAAc;IACdC,QAAQ;IACRG,WAAW;IACXK;EACF,CAAC;AACH,CAAC"}
@@ -0,0 +1,32 @@
+import _defineProperty from "@babel/runtime/helpers/esm/defineProperty";
+const toNumber = bigint => {
+  if (bigint > Number.MAX_SAFE_INTEGER) {
+    throw new Error('Offset is out of bounds');
+  }
+  return Number(bigint);
+};
+export class DataViewFileProvider {
+  constructor(file) {
+    _defineProperty(this, "file", void 0);
+    this.file = file;
+  }
+  async getUint8(offset) {
+    return this.file.getUint8(toNumber(offset));
+  }
+  async getUint16(offset) {
+    return this.file.getUint16(toNumber(offset), true);
+  }
+  async getUint32(offset) {
+    return this.file.getUint32(toNumber(offset), true);
+  }
+  async getBigUint64(offset) {
+    return this.file.getBigUint64(toNumber(offset), true);
+  }
+  async slice(startOffset, endOffset) {
+    return this.file.buffer.slice(toNumber(startOffset), toNumber(endOffset));
+  }
+  get length() {
+    return BigInt(this.file.byteLength);
+  }
+}
+//# sourceMappingURL=data-view-file-provider.js.map
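DataViewFileProvider, added above, adapts an in-memory DataView to the bigint-based FileProvider interface: each read converts the bigint offset to a number (throwing once it exceeds Number.MAX_SAFE_INTEGER) and uses little-endian byte order, as the zip format requires. A short sketch of wiring it to the header parser, again with assumed relative import paths:

// Sketch only: import paths are assumptions based on the src paths in this diff.
import {DataViewFileProvider} from './data-view-file-provider';
import {parseZipCDFileHeader} from './cd-file-header';

async function parseHeaderFromArrayBuffer(data: ArrayBuffer, cdHeaderOffset: bigint) {
  // Wrap the buffer in a DataView and hand it to the provider; all further
  // reads go through the async, bigint-offset FileProvider contract.
  const provider = new DataViewFileProvider(new DataView(data));
  return parseZipCDFileHeader(cdHeaderOffset, provider);
}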
@@ -0,0 +1 @@
+
{"version":3,"file":"data-view-file-provider.js","names":["toNumber","bigint","Number","MAX_SAFE_INTEGER","Error","DataViewFileProvider","constructor","file","_defineProperty","getUint8","offset","getUint16","getUint32","getBigUint64","slice","startOffset","endOffset","buffer","length","BigInt","byteLength"],"sources":["../../../../../src/lib/parsers/parse-zip/data-view-file-provider.ts"],"sourcesContent":["import {FileProvider} from './file-provider';\n\n/**\n * Checks if bigint can be converted to number and convert it if possible\n * @param bigint bigint to be converted\n * @returns number\n */\nconst toNumber = (bigint: bigint) => {\n if (bigint > Number.MAX_SAFE_INTEGER) {\n throw new Error('Offset is out of bounds');\n }\n return Number(bigint);\n};\n\n/** Provides file data using DataView */\nexport class DataViewFileProvider implements FileProvider {\n /** The DataView from which data is provided */\n private file: DataView;\n\n constructor(file: DataView) {\n this.file = file;\n }\n\n /**\n * Gets an unsigned 8-bit integer at the specified byte offset from the start of the file.\n * @param offset The offset, in bytes, from the start of the file where to read the data.\n */\n async getUint8(offset: bigint): Promise<number> {\n return this.file.getUint8(toNumber(offset));\n }\n\n /**\n * Gets an unsigned 16-bit intege at the specified byte offset from the start of the file.\n * @param offset The offset, in bytes, from the start of the file where to read the data.\n */\n async getUint16(offset: bigint): Promise<number> {\n return this.file.getUint16(toNumber(offset), true);\n }\n\n /**\n * Gets an unsigned 32-bit integer at the specified byte offset from the start of the file.\n * @param offset The offset, in bytes, from the start of the file where to read the data.\n */\n async getUint32(offset: bigint): Promise<number> {\n return this.file.getUint32(toNumber(offset), true);\n }\n\n /**\n * Gets an unsigned 64-bit integer at the specified byte offset from the start of the file.\n * @param offset The offset, in bytes, from the start of the file where to read the data.\n */\n async getBigUint64(offset: bigint): Promise<bigint> {\n return this.file.getBigUint64(toNumber(offset), true);\n }\n\n /**\n * returns an ArrayBuffer whose contents are a copy of this file bytes from startOffset, inclusive, up to endOffset, exclusive.\n * @param startOffset The offset, in bytes, from the start of the file where to start reading the data.\n * @param endOffset The offset, in bytes, from the start of the file where to end reading the data.\n */\n async slice(startOffset: bigint, endOffset: bigint): Promise<ArrayBuffer> {\n return this.file.buffer.slice(toNumber(startOffset), toNumber(endOffset));\n }\n\n /** the length (in bytes) of the data. 
*/\n get length() {\n return BigInt(this.file.byteLength);\n }\n}\n"],"mappings":";AAOA,MAAMA,QAAQ,GAAIC,MAAc,IAAK;EACnC,IAAIA,MAAM,GAAGC,MAAM,CAACC,gBAAgB,EAAE;IACpC,MAAM,IAAIC,KAAK,CAAC,yBAAyB,CAAC;EAC5C;EACA,OAAOF,MAAM,CAACD,MAAM,CAAC;AACvB,CAAC;AAGD,OAAO,MAAMI,oBAAoB,CAAyB;EAIxDC,WAAWA,CAACC,IAAc,EAAE;IAAAC,eAAA;IAC1B,IAAI,CAACD,IAAI,GAAGA,IAAI;EAClB;EAMA,MAAME,QAAQA,CAACC,MAAc,EAAmB;IAC9C,OAAO,IAAI,CAACH,IAAI,CAACE,QAAQ,CAACT,QAAQ,CAACU,MAAM,CAAC,CAAC;EAC7C;EAMA,MAAMC,SAASA,CAACD,MAAc,EAAmB;IAC/C,OAAO,IAAI,CAACH,IAAI,CAACI,SAAS,CAACX,QAAQ,CAACU,MAAM,CAAC,EAAE,IAAI,CAAC;EACpD;EAMA,MAAME,SAASA,CAACF,MAAc,EAAmB;IAC/C,OAAO,IAAI,CAACH,IAAI,CAACK,SAAS,CAACZ,QAAQ,CAACU,MAAM,CAAC,EAAE,IAAI,CAAC;EACpD;EAMA,MAAMG,YAAYA,CAACH,MAAc,EAAmB;IAClD,OAAO,IAAI,CAACH,IAAI,CAACM,YAAY,CAACb,QAAQ,CAACU,MAAM,CAAC,EAAE,IAAI,CAAC;EACvD;EAOA,MAAMI,KAAKA,CAACC,WAAmB,EAAEC,SAAiB,EAAwB;IACxE,OAAO,IAAI,CAACT,IAAI,CAACU,MAAM,CAACH,KAAK,CAACd,QAAQ,CAACe,WAAW,CAAC,EAAEf,QAAQ,CAACgB,SAAS,CAAC,CAAC;EAC3E;EAGA,IAAIE,MAAMA,CAAA,EAAG;IACX,OAAOC,MAAM,CAAC,IAAI,CAACZ,IAAI,CAACa,UAAU,CAAC;EACrC;AACF"}