@loaders.gl/tile-converter 4.1.0-alpha.10 → 4.1.0-alpha.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/3d-tiles-converter/helpers/b3dm-converter.d.ts.map +1 -1
- package/dist/3d-tiles-converter/helpers/b3dm-converter.js +8 -0
- package/dist/3d-tiles-converter/helpers/b3dm-converter.js.map +1 -1
- package/dist/converter-cli.js +2 -2
- package/dist/converter-cli.js.map +1 -1
- package/dist/converter.min.cjs +131 -220
- package/dist/deps-installer/deps-installer.js +1 -1
- package/dist/i3s-converter/i3s-converter.d.ts.map +1 -1
- package/dist/i3s-converter/i3s-converter.js +6 -6
- package/dist/i3s-converter/i3s-converter.js.map +1 -1
- package/dist/i3s-server/bin/i3s-server.min.cjs +76 -76
- package/dist/index.cjs +69 -106
- package/dist/lib/utils/compress-util.d.ts +0 -37
- package/dist/lib/utils/compress-util.d.ts.map +1 -1
- package/dist/lib/utils/compress-util.js +1 -149
- package/dist/lib/utils/compress-util.js.map +1 -1
- package/dist/lib/utils/conversion-dump.d.ts +2 -1
- package/dist/lib/utils/conversion-dump.d.ts.map +1 -1
- package/dist/lib/utils/conversion-dump.js +14 -10
- package/dist/lib/utils/conversion-dump.js.map +1 -1
- package/dist/pgm-loader.js +1 -1
- package/package.json +14 -14
- package/src/3d-tiles-converter/helpers/b3dm-converter.ts +19 -0
- package/src/converter-cli.ts +2 -2
- package/src/i3s-converter/i3s-converter.ts +8 -36
- package/src/lib/utils/compress-util.ts +1 -264
- package/src/lib/utils/conversion-dump.ts +16 -11
package/dist/lib/utils/compress-util.js
CHANGED

@@ -1,13 +1,5 @@
  import { createGzip } from 'zlib';
- import { join } from 'path';
- import { promises as fs, createReadStream, createWriteStream } from 'fs';
- import archiver from 'archiver';
- import { removeFile } from "./file-utils.js";
- import { ChildProcessProxy } from '@loaders.gl/worker-utils';
- import JSZip from 'jszip';
- import { MD5Hash } from '@loaders.gl/crypto';
- import crypt from 'crypt';
- import { getAbsoluteFilePath } from "./file-utils.js";
+ import { createReadStream, createWriteStream } from 'fs';
  export function compressFileWithGzip(pathFile) {
    const compressedPathFile = `${pathFile}.gz`;
    const gzip = createGzip();
@@ -25,144 +17,4 @@ export function compressFileWithGzip(pathFile) {
      input.pipe(gzip).pipe(output);
    });
  }
- export async function compressFilesWithZip(fileMap, outputFile) {
-   let level = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 0;
-   try {
-     await removeFile(outputFile);
-   } catch (e) {}
-   const output = createWriteStream(outputFile);
-   const archive = archiver('zip', {
-     zlib: {
-       level
-     }
-   });
-   return new Promise(async (resolve, reject) => {
-     output.on('close', function () {
-       console.log(`${outputFile} saved.`);
-       console.log(`${archive.pointer()} total bytes`);
-       resolve(null);
-     });
-     output.on('end', function () {
-       console.log('Data has been drained');
-       resolve(null);
-     });
-     archive.on('warning', function (err) {
-       console.log(err);
-       reject(err);
-     });
-     archive.on('error', function (err) {
-       reject(err);
-     });
-     archive.pipe(output);
-     for (const subFileName in fileMap) {
-       const subFileData = fileMap[subFileName];
-       await appendFileToArchive(archive, subFileName, subFileData);
-     }
-     archive.finalize();
-   });
- }
- export async function compressWithChildProcess(inputFolder, outputFile, level, inputFiles, sevenZipExe) {
-   if (process.platform === 'win32') {
-     await compressWithChildProcessWindows(inputFolder, outputFile, level, inputFiles, sevenZipExe);
-   } else {
-     await compressWithChildProcessUnix(inputFolder, outputFile, level, inputFiles);
-   }
- }
- async function compressWithChildProcessUnix(inputFolder, outputFile) {
-   let level = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 0;
-   let inputFiles = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : '.';
-   const fullOutputFile = getAbsoluteFilePath(outputFile);
-   const args = [`-${level}`, '-r', fullOutputFile, inputFiles];
-   const childProcess = new ChildProcessProxy();
-   await childProcess.start({
-     command: 'zip',
-     arguments: args,
-     spawn: {
-       cwd: inputFolder
-     },
-     wait: 0
-   });
- }
- async function compressWithChildProcessWindows(inputFolder, outputFile) {
-   let level = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 0;
-   let inputFiles = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : join('.', '*');
-   let sevenZipExe = arguments.length > 4 ? arguments[4] : undefined;
-   if (inputFiles[0] === '@') {
-     inputFiles = `*${inputFiles.substr(1)}`;
-   }
-   const fullOutputFile = getAbsoluteFilePath(outputFile);
-   const args = ['a', '-tzip', `-mx=${level}`, fullOutputFile, inputFiles];
-   const childProcess = new ChildProcessProxy();
-   await childProcess.start({
-     command: sevenZipExe,
-     arguments: args,
-     spawn: {
-       cwd: `${inputFolder}`
-     },
-     wait: 0
-   });
- }
- export async function generateHash128FromZip(inputZipFile, outputFile) {
-   const input = await fs.readFile(inputZipFile);
-   const zip = await JSZip.loadAsync(input);
-   const hashTable = [];
-   const zipFiles = zip.files;
-   for (const relativePath in zipFiles) {
-     const zipEntry = zipFiles[relativePath];
-     const _data = '_data';
-     const content = zipEntry[_data].compressedContent;
-     if (zipEntry.dir) continue;
-     const hash = await new MD5Hash().hash(Buffer.from(relativePath.toLowerCase()), 'base64');
-     hashTable.push({
-       key: atob(hash),
-       value: content.byteOffset
-     });
-   }
-   hashTable.sort((prev, next) => {
-     if (prev.key === next.key) {
-       return prev.value < next.value ? -1 : 1;
-     }
-     return prev.key < next.key ? -1 : 1;
-   });
-   const output = createWriteStream(outputFile);
-   return new Promise((resolve, reject) => {
-     output.on('close', function () {
-       console.log(`${outputFile} generated and saved`);
-       resolve(null);
-     });
-     output.on('error', function (err) {
-       console.log(err);
-       reject(err);
-     });
-     for (const key in hashTable) {
-       const item = hashTable[key];
-       const value = longToByteArray(item.value);
-       output.write(Buffer.from(crypt.hexToBytes(item.key).concat(value)));
-     }
-     output.close();
-   });
- }
- function longToByteArray(long) {
-   const buffer = new ArrayBuffer(8);
-   const longNum = new Float64Array(buffer);
-   longNum[0] = parseInt(long);
-   return Array.from(new Uint8Array(buffer)).reverse();
- }
- export async function addFileToZip(inputFolder, fileName, zipFile, sevenZipExe) {
-   await compressWithChildProcess(inputFolder, zipFile, 0, fileName, sevenZipExe);
-   console.log(`${fileName} added to ${zipFile}.`);
- }
- function appendFileToArchive(archive, subFileName, subFileData) {
-   return new Promise(resolve => {
-     const fileStream = createReadStream(subFileData);
-     console.log(`Compression start: ${subFileName}`);
-     fileStream.on('close', () => {
-       console.log(`Compression finish: ${subFileName}`);
-       resolve(null);
-     });
-     archive.append(fileStream, {
-       name: subFileName
-     });
-   });
- }
  //# sourceMappingURL=compress-util.js.map
package/dist/lib/utils/compress-util.js.map
CHANGED

@@ -1 +1 @@
-
{"version":3,"file":"compress-util.js","names":["createGzip","join","promises","fs","createReadStream","createWriteStream","archiver","removeFile","ChildProcessProxy","JSZip","MD5Hash","crypt","getAbsoluteFilePath","compressFileWithGzip","pathFile","compressedPathFile","gzip","input","output","Promise","resolve","reject","on","console","log","error","pipe","compressFilesWithZip","fileMap","outputFile","level","arguments","length","undefined","e","archive","zlib","pointer","err","subFileName","subFileData","appendFileToArchive","finalize","compressWithChildProcess","inputFolder","inputFiles","sevenZipExe","process","platform","compressWithChildProcessWindows","compressWithChildProcessUnix","fullOutputFile","args","childProcess","start","command","spawn","cwd","wait","substr","generateHash128FromZip","inputZipFile","readFile","zip","loadAsync","hashTable","zipFiles","files","relativePath","zipEntry","_data","content","compressedContent","dir","hash","Buffer","from","toLowerCase","push","key","atob","value","byteOffset","sort","prev","next","item","longToByteArray","write","hexToBytes","concat","close","long","buffer","ArrayBuffer","longNum","Float64Array","parseInt","Array","Uint8Array","reverse","addFileToZip","fileName","zipFile","fileStream","append","name"],"sources":["../../../src/lib/utils/compress-util.ts"],"sourcesContent":["import {createGzip} from 'zlib';\nimport {join} from 'path';\nimport {promises as fs, createReadStream, createWriteStream} from 'fs';\nimport archiver from 'archiver';\nimport {removeFile} from './file-utils';\nimport {ChildProcessProxy} from '@loaders.gl/worker-utils';\nimport JSZip from 'jszip';\nimport {MD5Hash} from '@loaders.gl/crypto';\nimport crypt from 'crypt';\nimport {getAbsoluteFilePath} from './file-utils';\n\n/**\n * Compress file to gzip file\n *\n * @param pathFile - the path to the file\n * @return the path to the gzip file\n */\nexport function compressFileWithGzip(pathFile: string): Promise<string> {\n const compressedPathFile = `${pathFile}.gz`;\n const gzip = createGzip();\n const input = createReadStream(pathFile);\n const output = createWriteStream(compressedPathFile);\n\n return new Promise((resolve, reject) => {\n input.on('end', () => {\n console.log(`${compressedPathFile} compressed and saved.`); // eslint-disable-line no-undef,no-console\n resolve(compressedPathFile);\n });\n input.on('error', (error) => {\n console.log(`${compressedPathFile}: compression error!`); // eslint-disable-line no-undef,no-console\n reject(error);\n });\n input.pipe(gzip).pipe(output);\n });\n}\n\n/**\n * Compress files from map into slpk file\n *\n * @param fileMap - map with file paths (key: output path, value: input path)\n * @param outputFile - output slpk file\n * @param level - compression level\n */\nexport async function compressFilesWithZip(\n fileMap: {[key: string]: string},\n outputFile: string,\n level: number = 0\n) {\n // Before creating a new file, we need to delete the old file\n try {\n await removeFile(outputFile);\n } catch (e) {\n // Do nothing if old file doesn't exist\n }\n\n const output = createWriteStream(outputFile);\n const archive = archiver('zip', {\n zlib: {level} // Sets the compression level.\n });\n\n return new Promise(async (resolve, reject) => {\n // listen for all archive data to be writte\n // 'close' event is fired only when a file descriptor is involved\n output.on('close', function () {\n console.log(`${outputFile} saved.`); // eslint-disable-line no-undef,no-console\n console.log(`${archive.pointer()} total bytes`); 
// eslint-disable-line no-undef,no-console\n resolve(null);\n });\n\n // This event is fired when the data source is drained no matter what was the data source.\n // It is not part of this library but rather from the NodeJS Stream API.\n // @see: https://nodejs.org/api/stream.html#stream_event_end\n output.on('end', function () {\n console.log('Data has been drained'); // eslint-disable-line no-undef,no-console\n resolve(null);\n });\n\n // good practice to catch warnings (ie stat failures and other non-blocking errors)\n archive.on('warning', function (err) {\n console.log(err); // eslint-disable-line no-undef,no-console\n reject(err);\n });\n\n // good practice to catch this error explicitly\n archive.on('error', function (err) {\n reject(err);\n });\n\n // pipe archive data to the file\n archive.pipe(output);\n\n for (const subFileName in fileMap) {\n const subFileData = fileMap[subFileName];\n await appendFileToArchive(archive, subFileName, subFileData);\n }\n\n // finalize the archive (ie we are done appending files but streams have to finish yet)\n archive.finalize();\n });\n}\n\n/**\n * Compress files using external tool 'zip'/'7z'\n *\n * @param inputFolder - folder to archive - for cwd option\n * @param outputFile - output slpk file\n * @param level - compression level\n * @param inputFiles - input files path to pass to the executable as option\n * @param sevenZipExe - path to 7z.exe executable\n */\nexport async function compressWithChildProcess(\n inputFolder: string,\n outputFile: string,\n level: number,\n inputFiles: string,\n sevenZipExe: string\n) {\n // eslint-disable-next-line no-undef\n if (process.platform === 'win32') {\n await compressWithChildProcessWindows(inputFolder, outputFile, level, inputFiles, sevenZipExe);\n } else {\n await compressWithChildProcessUnix(inputFolder, outputFile, level, inputFiles);\n }\n}\n\n/**\n * Compress files using external linux tool 'zip'\n *\n * @param inputFolder - folder to archive - for cwd option\n * @param outputFile - output slpk file\n * @param level - compression level\n * @param inputFiles - input files path to pass to the executable as option\n */\nasync function compressWithChildProcessUnix(\n inputFolder: string,\n outputFile: string,\n level: number = 0,\n inputFiles: string = '.'\n) {\n const fullOutputFile = getAbsoluteFilePath(outputFile);\n const args = [`-${level}`, '-r', fullOutputFile, inputFiles];\n const childProcess = new ChildProcessProxy();\n await childProcess.start({\n command: 'zip',\n arguments: args,\n spawn: {\n cwd: inputFolder\n },\n wait: 0\n });\n}\n\n/**\n * Compress files using windows external tool '7z'\n *\n * @param inputFolder - folder to archive - for cwd option\n * @param outputFile - output slpk file\n * @param level - compression level\n * @param inputFiles - input files path to pass to the executable as option\n * @param sevenZipExe - path to 7z.exe executable\n */\nasync function compressWithChildProcessWindows(\n inputFolder: string,\n outputFile: string,\n level: number = 0,\n inputFiles: string = join('.', '*'),\n sevenZipExe: string\n) {\n // Workaround for @listfile issue. 
In 7z.exe @-leading files are handled as listfiles\n // https://sevenzip.osdn.jp/chm/cmdline/syntax.htm\n if (inputFiles[0] === '@') {\n inputFiles = `*${inputFiles.substr(1)}`;\n }\n\n const fullOutputFile = getAbsoluteFilePath(outputFile);\n const args = ['a', '-tzip', `-mx=${level}`, fullOutputFile, inputFiles];\n const childProcess = new ChildProcessProxy();\n await childProcess.start({\n command: sevenZipExe,\n arguments: args,\n spawn: {\n cwd: `${inputFolder}`\n },\n wait: 0\n });\n}\n\n/**\n * Generate hash file from zip archive\n * https://github.com/Esri/i3s-spec/blob/master/docs/1.7/slpk_hashtable.cmn.md\n *\n * @param inputZipFile\n * @param outputFile\n */\nexport async function generateHash128FromZip(inputZipFile: string, outputFile: string) {\n const input = await fs.readFile(inputZipFile);\n const zip = await JSZip.loadAsync(input);\n const hashTable: {key: string; value: string}[] = [];\n const zipFiles = zip.files;\n for (const relativePath in zipFiles) {\n const zipEntry = zipFiles[relativePath];\n // Had to use a workaround because the correct string is getting the wrong data\n // const content = await zipEntry.async('nodebuffer');\n // _data isn't described in the interface, so lint thought it was wrong\n const _data = '_data';\n const content = zipEntry[_data].compressedContent;\n if (zipEntry.dir) continue; // eslint-disable-line no-continue\n // eslint-disable-next-line no-undef\n const hash = await new MD5Hash().hash(Buffer.from(relativePath.toLowerCase()), 'base64');\n // eslint-disable-next-line no-undef\n hashTable.push({key: atob(hash), value: content.byteOffset});\n }\n\n hashTable.sort((prev, next) => {\n if (prev.key === next.key) {\n return prev.value < next.value ? -1 : 1;\n }\n return prev.key < next.key ? -1 : 1;\n });\n\n const output = createWriteStream(outputFile);\n return new Promise((resolve, reject) => {\n output.on('close', function () {\n console.log(`${outputFile} generated and saved`); // eslint-disable-line\n resolve(null);\n });\n output.on('error', function (err) {\n console.log(err); // eslint-disable-line\n reject(err);\n });\n for (const key in hashTable) {\n const item = hashTable[key];\n const value = longToByteArray(item.value);\n // TODO: perhaps you need to wait for the 'drain' event if the write returns 'false'\n // eslint-disable-next-line no-undef\n output.write(Buffer.from(crypt.hexToBytes(item.key).concat(value)));\n }\n output.close();\n });\n}\n\n/**\n * Encode 64 bit value to byte array\n *\n * @param long - stringified number\n * @returns\n */\nfunction longToByteArray(long: string): number[] {\n const buffer = new ArrayBuffer(8); // JS numbers are 8 bytes long, or 64 bits\n const longNum = new Float64Array(buffer); // so equivalent to Float64\n longNum[0] = parseInt(long);\n return Array.from(new Uint8Array(buffer)).reverse(); // reverse to get little endian\n}\n\n/**\n * Add file to zip archive\n *\n * @param inputFile\n * @param fileName\n * @param zipFile\n * @param sevenZipExe\n */\nexport async function addFileToZip(\n inputFolder: string,\n fileName: string,\n zipFile: string,\n sevenZipExe: string\n) {\n await compressWithChildProcess(inputFolder, zipFile, 0, fileName, sevenZipExe);\n console.log(`${fileName} added to ${zipFile}.`); // eslint-disable-line\n}\n\n/**\n *\n * @param archive zip archive instance\n * @param subFileName file path inside archive\n * @param subFileData source file path\n * @returns\n */\nfunction appendFileToArchive(archive: any, subFileName: string, subFileData: string) {\n return new 
Promise((resolve) => {\n const fileStream = createReadStream(subFileData);\n console.log(`Compression start: ${subFileName}`); // eslint-disable-line no-undef,no-console\n fileStream.on('close', () => {\n console.log(`Compression finish: ${subFileName}`); // eslint-disable-line no-undef,no-console\n resolve(null);\n });\n archive.append(fileStream, {name: subFileName});\n });\n}\n"],"mappings":"AAAA,SAAQA,UAAU,QAAO,MAAM;AAC/B,SAAQC,IAAI,QAAO,MAAM;AACzB,SAAQC,QAAQ,IAAIC,EAAE,EAAEC,gBAAgB,EAAEC,iBAAiB,QAAO,IAAI;AACtE,OAAOC,QAAQ,MAAM,UAAU;AAAC,SACxBC,UAAU;AAClB,SAAQC,iBAAiB,QAAO,0BAA0B;AAC1D,OAAOC,KAAK,MAAM,OAAO;AACzB,SAAQC,OAAO,QAAO,oBAAoB;AAC1C,OAAOC,KAAK,MAAM,OAAO;AAAC,SAClBC,mBAAmB;AAQ3B,OAAO,SAASC,oBAAoBA,CAACC,QAAgB,EAAmB;EACtE,MAAMC,kBAAkB,GAAI,GAAED,QAAS,KAAI;EAC3C,MAAME,IAAI,GAAGhB,UAAU,CAAC,CAAC;EACzB,MAAMiB,KAAK,GAAGb,gBAAgB,CAACU,QAAQ,CAAC;EACxC,MAAMI,MAAM,GAAGb,iBAAiB,CAACU,kBAAkB,CAAC;EAEpD,OAAO,IAAII,OAAO,CAAC,CAACC,OAAO,EAAEC,MAAM,KAAK;IACtCJ,KAAK,CAACK,EAAE,CAAC,KAAK,EAAE,MAAM;MACpBC,OAAO,CAACC,GAAG,CAAE,GAAET,kBAAmB,wBAAuB,CAAC;MAC1DK,OAAO,CAACL,kBAAkB,CAAC;IAC7B,CAAC,CAAC;IACFE,KAAK,CAACK,EAAE,CAAC,OAAO,EAAGG,KAAK,IAAK;MAC3BF,OAAO,CAACC,GAAG,CAAE,GAAET,kBAAmB,sBAAqB,CAAC;MACxDM,MAAM,CAACI,KAAK,CAAC;IACf,CAAC,CAAC;IACFR,KAAK,CAACS,IAAI,CAACV,IAAI,CAAC,CAACU,IAAI,CAACR,MAAM,CAAC;EAC/B,CAAC,CAAC;AACJ;AASA,OAAO,eAAeS,oBAAoBA,CACxCC,OAAgC,EAChCC,UAAkB,EAElB;EAAA,IADAC,KAAa,GAAAC,SAAA,CAAAC,MAAA,QAAAD,SAAA,QAAAE,SAAA,GAAAF,SAAA,MAAG,CAAC;EAGjB,IAAI;IACF,MAAMxB,UAAU,CAACsB,UAAU,CAAC;EAC9B,CAAC,CAAC,OAAOK,CAAC,EAAE,CAEZ;EAEA,MAAMhB,MAAM,GAAGb,iBAAiB,CAACwB,UAAU,CAAC;EAC5C,MAAMM,OAAO,GAAG7B,QAAQ,CAAC,KAAK,EAAE;IAC9B8B,IAAI,EAAE;MAACN;IAAK;EACd,CAAC,CAAC;EAEF,OAAO,IAAIX,OAAO,CAAC,OAAOC,OAAO,EAAEC,MAAM,KAAK;IAG5CH,MAAM,CAACI,EAAE,CAAC,OAAO,EAAE,YAAY;MAC7BC,OAAO,CAACC,GAAG,CAAE,GAAEK,UAAW,SAAQ,CAAC;MACnCN,OAAO,CAACC,GAAG,CAAE,GAAEW,OAAO,CAACE,OAAO,CAAC,CAAE,cAAa,CAAC;MAC/CjB,OAAO,CAAC,IAAI,CAAC;IACf,CAAC,CAAC;IAKFF,MAAM,CAACI,EAAE,CAAC,KAAK,EAAE,YAAY;MAC3BC,OAAO,CAACC,GAAG,CAAC,uBAAuB,CAAC;MACpCJ,OAAO,CAAC,IAAI,CAAC;IACf,CAAC,CAAC;IAGFe,OAAO,CAACb,EAAE,CAAC,SAAS,EAAE,UAAUgB,GAAG,EAAE;MACnCf,OAAO,CAACC,GAAG,CAACc,GAAG,CAAC;MAChBjB,MAAM,CAACiB,GAAG,CAAC;IACb,CAAC,CAAC;IAGFH,OAAO,CAACb,EAAE,CAAC,OAAO,EAAE,UAAUgB,GAAG,EAAE;MACjCjB,MAAM,CAACiB,GAAG,CAAC;IACb,CAAC,CAAC;IAGFH,OAAO,CAACT,IAAI,CAACR,MAAM,CAAC;IAEpB,KAAK,MAAMqB,WAAW,IAAIX,OAAO,EAAE;MACjC,MAAMY,WAAW,GAAGZ,OAAO,CAACW,WAAW,CAAC;MACxC,MAAME,mBAAmB,CAACN,OAAO,EAAEI,WAAW,EAAEC,WAAW,CAAC;IAC9D;IAGAL,OAAO,CAACO,QAAQ,CAAC,CAAC;EACpB,CAAC,CAAC;AACJ;AAWA,OAAO,eAAeC,wBAAwBA,CAC5CC,WAAmB,EACnBf,UAAkB,EAClBC,KAAa,EACbe,UAAkB,EAClBC,WAAmB,EACnB;EAEA,IAAIC,OAAO,CAACC,QAAQ,KAAK,OAAO,EAAE;IAChC,MAAMC,+BAA+B,CAACL,WAAW,EAAEf,UAAU,EAAEC,KAAK,EAAEe,UAAU,EAAEC,WAAW,CAAC;EAChG,CAAC,MAAM;IACL,MAAMI,4BAA4B,CAACN,WAAW,EAAEf,UAAU,EAAEC,KAAK,EAAEe,UAAU,CAAC;EAChF;AACF;AAUA,eAAeK,4BAA4BA,CACzCN,WAAmB,EACnBf,UAAkB,EAGlB;EAAA,IAFAC,KAAa,GAAAC,SAAA,CAAAC,MAAA,QAAAD,SAAA,QAAAE,SAAA,GAAAF,SAAA,MAAG,CAAC;EAAA,IACjBc,UAAkB,GAAAd,SAAA,CAAAC,MAAA,QAAAD,SAAA,QAAAE,SAAA,GAAAF,SAAA,MAAG,GAAG;EAExB,MAAMoB,cAAc,GAAGvC,mBAAmB,CAACiB,UAAU,CAAC;EACtD,MAAMuB,IAAI,GAAG,CAAE,IAAGtB,KAAM,EAAC,EAAE,IAAI,EAAEqB,cAAc,EAAEN,UAAU,CAAC;EAC5D,MAAMQ,YAAY,GAAG,IAAI7C,iBAAiB,CAAC,CAAC;EAC5C,MAAM6C,YAAY,CAACC,KAAK,CAAC;IACvBC,OAAO,EAAE,KAAK;IACdxB,SAAS,EAAEqB,IAAI;IACfI,KAAK,EAAE;MACLC,GAAG,EAAEb;IACP,CAAC;IACDc,IAAI,EAAE;EACR,CAAC,CAAC;AACJ;AAWA,eAAeT,+BAA+BA,CAC5CL,WAAmB,EACnBf,UAAkB,EAIlB;EAAA,IAHAC,KAAa,GAAAC,SAAA,CAAAC,MAAA,QAAAD,SAAA,QAAAE,SAAA,GAAAF,SAAA,MAAG,CAAC;EAAA,IACjBc,UAAkB,
GAAAd,SAAA,CAAAC,MAAA,QAAAD,SAAA,QAAAE,SAAA,GAAAF,SAAA,MAAG9B,IAAI,CAAC,GAAG,EAAE,GAAG,CAAC;EAAA,IACnC6C,WAAmB,GAAAf,SAAA,CAAAC,MAAA,OAAAD,SAAA,MAAAE,SAAA;EAInB,IAAIY,UAAU,CAAC,CAAC,CAAC,KAAK,GAAG,EAAE;IACzBA,UAAU,GAAI,IAAGA,UAAU,CAACc,MAAM,CAAC,CAAC,CAAE,EAAC;EACzC;EAEA,MAAMR,cAAc,GAAGvC,mBAAmB,CAACiB,UAAU,CAAC;EACtD,MAAMuB,IAAI,GAAG,CAAC,GAAG,EAAE,OAAO,EAAG,OAAMtB,KAAM,EAAC,EAAEqB,cAAc,EAAEN,UAAU,CAAC;EACvE,MAAMQ,YAAY,GAAG,IAAI7C,iBAAiB,CAAC,CAAC;EAC5C,MAAM6C,YAAY,CAACC,KAAK,CAAC;IACvBC,OAAO,EAAET,WAAW;IACpBf,SAAS,EAAEqB,IAAI;IACfI,KAAK,EAAE;MACLC,GAAG,EAAG,GAAEb,WAAY;IACtB,CAAC;IACDc,IAAI,EAAE;EACR,CAAC,CAAC;AACJ;AASA,OAAO,eAAeE,sBAAsBA,CAACC,YAAoB,EAAEhC,UAAkB,EAAE;EACrF,MAAMZ,KAAK,GAAG,MAAMd,EAAE,CAAC2D,QAAQ,CAACD,YAAY,CAAC;EAC7C,MAAME,GAAG,GAAG,MAAMtD,KAAK,CAACuD,SAAS,CAAC/C,KAAK,CAAC;EACxC,MAAMgD,SAAyC,GAAG,EAAE;EACpD,MAAMC,QAAQ,GAAGH,GAAG,CAACI,KAAK;EAC1B,KAAK,MAAMC,YAAY,IAAIF,QAAQ,EAAE;IACnC,MAAMG,QAAQ,GAAGH,QAAQ,CAACE,YAAY,CAAC;IAIvC,MAAME,KAAK,GAAG,OAAO;IACrB,MAAMC,OAAO,GAAGF,QAAQ,CAACC,KAAK,CAAC,CAACE,iBAAiB;IACjD,IAAIH,QAAQ,CAACI,GAAG,EAAE;IAElB,MAAMC,IAAI,GAAG,MAAM,IAAIhE,OAAO,CAAC,CAAC,CAACgE,IAAI,CAACC,MAAM,CAACC,IAAI,CAACR,YAAY,CAACS,WAAW,CAAC,CAAC,CAAC,EAAE,QAAQ,CAAC;IAExFZ,SAAS,CAACa,IAAI,CAAC;MAACC,GAAG,EAAEC,IAAI,CAACN,IAAI,CAAC;MAAEO,KAAK,EAAEV,OAAO,CAACW;IAAU,CAAC,CAAC;EAC9D;EAEAjB,SAAS,CAACkB,IAAI,CAAC,CAACC,IAAI,EAAEC,IAAI,KAAK;IAC7B,IAAID,IAAI,CAACL,GAAG,KAAKM,IAAI,CAACN,GAAG,EAAE;MACzB,OAAOK,IAAI,CAACH,KAAK,GAAGI,IAAI,CAACJ,KAAK,GAAG,CAAC,CAAC,GAAG,CAAC;IACzC;IACA,OAAOG,IAAI,CAACL,GAAG,GAAGM,IAAI,CAACN,GAAG,GAAG,CAAC,CAAC,GAAG,CAAC;EACrC,CAAC,CAAC;EAEF,MAAM7D,MAAM,GAAGb,iBAAiB,CAACwB,UAAU,CAAC;EAC5C,OAAO,IAAIV,OAAO,CAAC,CAACC,OAAO,EAAEC,MAAM,KAAK;IACtCH,MAAM,CAACI,EAAE,CAAC,OAAO,EAAE,YAAY;MAC7BC,OAAO,CAACC,GAAG,CAAE,GAAEK,UAAW,sBAAqB,CAAC;MAChDT,OAAO,CAAC,IAAI,CAAC;IACf,CAAC,CAAC;IACFF,MAAM,CAACI,EAAE,CAAC,OAAO,EAAE,UAAUgB,GAAG,EAAE;MAChCf,OAAO,CAACC,GAAG,CAACc,GAAG,CAAC;MAChBjB,MAAM,CAACiB,GAAG,CAAC;IACb,CAAC,CAAC;IACF,KAAK,MAAMyC,GAAG,IAAId,SAAS,EAAE;MAC3B,MAAMqB,IAAI,GAAGrB,SAAS,CAACc,GAAG,CAAC;MAC3B,MAAME,KAAK,GAAGM,eAAe,CAACD,IAAI,CAACL,KAAK,CAAC;MAGzC/D,MAAM,CAACsE,KAAK,CAACb,MAAM,CAACC,IAAI,CAACjE,KAAK,CAAC8E,UAAU,CAACH,IAAI,CAACP,GAAG,CAAC,CAACW,MAAM,CAACT,KAAK,CAAC,CAAC,CAAC;IACrE;IACA/D,MAAM,CAACyE,KAAK,CAAC,CAAC;EAChB,CAAC,CAAC;AACJ;AAQA,SAASJ,eAAeA,CAACK,IAAY,EAAY;EAC/C,MAAMC,MAAM,GAAG,IAAIC,WAAW,CAAC,CAAC,CAAC;EACjC,MAAMC,OAAO,GAAG,IAAIC,YAAY,CAACH,MAAM,CAAC;EACxCE,OAAO,CAAC,CAAC,CAAC,GAAGE,QAAQ,CAACL,IAAI,CAAC;EAC3B,OAAOM,KAAK,CAACtB,IAAI,CAAC,IAAIuB,UAAU,CAACN,MAAM,CAAC,CAAC,CAACO,OAAO,CAAC,CAAC;AACrD;AAUA,OAAO,eAAeC,YAAYA,CAChCzD,WAAmB,EACnB0D,QAAgB,EAChBC,OAAe,EACfzD,WAAmB,EACnB;EACA,MAAMH,wBAAwB,CAACC,WAAW,EAAE2D,OAAO,EAAE,CAAC,EAAED,QAAQ,EAAExD,WAAW,CAAC;EAC9EvB,OAAO,CAACC,GAAG,CAAE,GAAE8E,QAAS,aAAYC,OAAQ,GAAE,CAAC;AACjD;AASA,SAAS9D,mBAAmBA,CAACN,OAAY,EAAEI,WAAmB,EAAEC,WAAmB,EAAE;EACnF,OAAO,IAAIrB,OAAO,CAAEC,OAAO,IAAK;IAC9B,MAAMoF,UAAU,GAAGpG,gBAAgB,CAACoC,WAAW,CAAC;IAChDjB,OAAO,CAACC,GAAG,CAAE,sBAAqBe,WAAY,EAAC,CAAC;IAChDiE,UAAU,CAAClF,EAAE,CAAC,OAAO,EAAE,MAAM;MAC3BC,OAAO,CAACC,GAAG,CAAE,uBAAsBe,WAAY,EAAC,CAAC;MACjDnB,OAAO,CAAC,IAAI,CAAC;IACf,CAAC,CAAC;IACFe,OAAO,CAACsE,MAAM,CAACD,UAAU,EAAE;MAACE,IAAI,EAAEnE;IAAW,CAAC,CAAC;EACjD,CAAC,CAAC;AACJ"}
+
{"version":3,"file":"compress-util.js","names":["createGzip","createReadStream","createWriteStream","compressFileWithGzip","pathFile","compressedPathFile","gzip","input","output","Promise","resolve","reject","on","console","log","error","pipe"],"sources":["../../../src/lib/utils/compress-util.ts"],"sourcesContent":["import {createGzip} from 'zlib';\nimport {createReadStream, createWriteStream} from 'fs';\n\n/**\n * Compress file to gzip file\n *\n * @param pathFile - the path to the file\n * @return the path to the gzip file\n */\nexport function compressFileWithGzip(pathFile: string): Promise<string> {\n const compressedPathFile = `${pathFile}.gz`;\n const gzip = createGzip();\n const input = createReadStream(pathFile);\n const output = createWriteStream(compressedPathFile);\n\n return new Promise((resolve, reject) => {\n input.on('end', () => {\n console.log(`${compressedPathFile} compressed and saved.`); // eslint-disable-line no-undef,no-console\n resolve(compressedPathFile);\n });\n input.on('error', (error) => {\n console.log(`${compressedPathFile}: compression error!`); // eslint-disable-line no-undef,no-console\n reject(error);\n });\n input.pipe(gzip).pipe(output);\n });\n}\n"],"mappings":"AAAA,SAAQA,UAAU,QAAO,MAAM;AAC/B,SAAQC,gBAAgB,EAAEC,iBAAiB,QAAO,IAAI;AAQtD,OAAO,SAASC,oBAAoBA,CAACC,QAAgB,EAAmB;EACtE,MAAMC,kBAAkB,GAAI,GAAED,QAAS,KAAI;EAC3C,MAAME,IAAI,GAAGN,UAAU,CAAC,CAAC;EACzB,MAAMO,KAAK,GAAGN,gBAAgB,CAACG,QAAQ,CAAC;EACxC,MAAMI,MAAM,GAAGN,iBAAiB,CAACG,kBAAkB,CAAC;EAEpD,OAAO,IAAII,OAAO,CAAC,CAACC,OAAO,EAAEC,MAAM,KAAK;IACtCJ,KAAK,CAACK,EAAE,CAAC,KAAK,EAAE,MAAM;MACpBC,OAAO,CAACC,GAAG,CAAE,GAAET,kBAAmB,wBAAuB,CAAC;MAC1DK,OAAO,CAACL,kBAAkB,CAAC;IAC7B,CAAC,CAAC;IACFE,KAAK,CAACK,EAAE,CAAC,OAAO,EAAGG,KAAK,IAAK;MAC3BF,OAAO,CAACC,GAAG,CAAE,GAAET,kBAAmB,sBAAqB,CAAC;MACxDM,MAAM,CAACI,KAAK,CAAC;IACf,CAAC,CAAC;IACFR,KAAK,CAACS,IAAI,CAACV,IAAI,CAAC,CAACU,IAAI,CAACR,MAAM,CAAC;EAC/B,CAAC,CAAC;AACJ"}
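After this change compressFileWithGzip is the module's only remaining export; the archive and hash helpers removed above are replaced by @loaders.gl/zip utilities in the converter sources further down. A minimal usage sketch of the surviving function (the input path is illustrative):

import {compressFileWithGzip} from './lib/utils/compress-util';

// Writes '<pathFile>.gz' next to the input file and resolves with the new path.
async function gzipNodePage(): Promise<void> {
  const gzPath = await compressFileWithGzip('./output/layers/0/nodepages/0.json');
  console.log(`compressed to ${gzPath}`); // './output/layers/0/nodepages/0.json.gz'
}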
package/dist/lib/utils/conversion-dump.d.ts.map
CHANGED

@@ -1 +1 @@
-
{"version":3,"file":"conversion-dump.d.ts","sourceRoot":"","sources":["../../../src/lib/utils/conversion-dump.ts"],"names":[],"mappings":"AAIA,MAAM,MAAM,qBAAqB,GAAG;IAClC,QAAQ,EAAE,MAAM,CAAC;IACjB,UAAU,EAAE,MAAM,CAAC;IACnB,WAAW,EAAE,MAAM,CAAC;IACpB,QAAQ,EAAE,MAAM,CAAC;IACjB,IAAI,EAAE,OAAO,CAAC;IACd,WAAW,EAAE,MAAM,CAAC;IACpB,KAAK,EAAE,MAAM,CAAC;IACd,KAAK,EAAE,OAAO,CAAC;IACf,cAAc,EAAE,OAAO,CAAC;IACxB,gBAAgB,EAAE,OAAO,CAAC;IAC1B,uBAAuB,EAAE,OAAO,CAAC;IACjC,aAAa,EAAE,MAAM,CAAC;IACtB,OAAO,EAAE,OAAO,CAAC;CAClB,CAAC;AAEF,KAAK,cAAc,GAAG;IACpB,MAAM,EAAE,MAAM,CAAC;IACf,IAAI,EAAE,
+
{"version":3,"file":"conversion-dump.d.ts","sourceRoot":"","sources":["../../../src/lib/utils/conversion-dump.ts"],"names":[],"mappings":"AAIA,MAAM,MAAM,qBAAqB,GAAG;IAClC,QAAQ,EAAE,MAAM,CAAC;IACjB,UAAU,EAAE,MAAM,CAAC;IACnB,WAAW,EAAE,MAAM,CAAC;IACpB,QAAQ,EAAE,MAAM,CAAC;IACjB,IAAI,EAAE,OAAO,CAAC;IACd,WAAW,EAAE,MAAM,CAAC;IACpB,KAAK,EAAE,MAAM,CAAC;IACd,KAAK,EAAE,OAAO,CAAC;IACf,cAAc,EAAE,OAAO,CAAC;IACxB,gBAAgB,EAAE,OAAO,CAAC;IAC1B,uBAAuB,EAAE,OAAO,CAAC;IACjC,aAAa,EAAE,MAAM,CAAC;IACtB,OAAO,EAAE,OAAO,CAAC;CAClB,CAAC;AAEF,KAAK,cAAc,GAAG;IACpB,MAAM,EAAE,MAAM,CAAC;IACf,IAAI,EAAE,OAAO,CAAC;IACd,QAAQ,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;CACnC,CAAC;AAEF,KAAK,cAAc,GAAG;IACpB,KAAK,EAAE,cAAc,EAAE,CAAC;CACzB,CAAC;AAEF,qBAAa,cAAc;IACzB,yBAAyB;IACzB,OAAO,CAAC,OAAO,CAAC,CAAwB;IACxC,2CAA2C;IAC3C,cAAc,EAAE,MAAM,CAAC,MAAM,EAAE,cAAc,CAAC,CAAC;;IAM/C;;;OAGG;IACG,cAAc,CAAC,OAAO,EAAE,qBAAqB,GAAG,OAAO,CAAC,IAAI,CAAC;IA2CnE;;OAEG;YACW,cAAc;IAiB5B;;OAEG;IACG,cAAc,IAAI,OAAO,CAAC,IAAI,CAAC;IAQrC;;;;OAIG;IACH,OAAO,CAAC,SAAS;IAIjB;;;;OAIG;IACH,OAAO,CAAC,SAAS;IAIjB;;;;OAIG;IACG,OAAO,CAAC,QAAQ,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM;IAS9C;;;;;;OAMG;IACH,gBAAgB,CAAC,QAAQ,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,YAAY,EAAE,MAAM,EAAE,KAAK,EAAE,OAAO;IAYvF;;;;OAIG;IACG,wBAAwB,CAC5B,cAAc,EAAE;QAAC,QAAQ,CAAC,EAAE,MAAM,CAAC;QAAC,QAAQ,CAAC,EAAE,MAAM,CAAC;QAAC,YAAY,CAAC,EAAE,MAAM,CAAA;KAAC,EAAE,EAC/E,YAAY,EAAE,oBAAoB,CAAC,MAAM,GAAG,IAAI,CAAC,EAAE;CA0BtD"}
package/dist/lib/utils/conversion-dump.js
CHANGED

@@ -79,7 +79,8 @@ export class ConversionDump
      };
      nodes.push({
        nodeId,
-       done:
+       done: false,
+       progress: {}
      });
      if (nodes.length === 1) {
        this.setRecord(filename, {
@@ -92,7 +93,10 @@ export class ConversionDump {
      var _this$tilesConverted$;
      const nodeDump = (_this$tilesConverted$ = this.tilesConverted[filename]) === null || _this$tilesConverted$ === void 0 ? void 0 : _this$tilesConverted$.nodes.find(element => element.nodeId === nodeId);
      if (nodeDump) {
-       nodeDump.
+       nodeDump.progress[resourceType] = value;
+       if (!value) {
+         nodeDump.done = false;
+       }
      }
    }
    async updateConvertedTilesDump(changedRecords, writeResults) {
@@ -105,18 +109,18 @@ export class ConversionDump {
          } = changedRecords[i];
          if (!sourceId || !resourceType || !outputId) continue;
          for (const node of this.tilesConverted[sourceId].nodes) {
-           if (
-           node.
-           }
-           if (typeof node.done !== 'boolean') {
+           if (node.nodeId === outputId) {
+             node.progress[resourceType] = true;
              let done = false;
-             for (const key in node.
-             done = node.
+             for (const key in node.progress) {
+               done = node.progress[key];
                if (!done) break;
              }
-
-
+             node.done = done;
+             if (node.done) {
+               node.progress = {};
              }
+             break;
            }
          }
        }
package/dist/lib/utils/conversion-dump.js.map
CHANGED

@@ -1 +1 @@
-
{"version":3,"file":"conversion-dump.js","names":["DUMP_FILE_SUFFIX","removeFile","writeFile","join","ConversionDump","constructor","options","tilesConverted","createDumpFile","tilesetName","slpk","egmFilePath","inputUrl","outputPath","draco","maxDepth","token","generateTextures","generateBoundingVolumes","mergeMaterials","metadataClass","analyze","JSON","stringify","error","console","log","updateDumpFile","_this$options","deleteDumpFile","_this$options2","getRecord","fileName","setRecord","object","addNode","filename","nodeId","nodes","push","done","length","updateDoneStatus","resourceType","value","_this$tilesConverted$","nodeDump","find","element","updateConvertedTilesDump","changedRecords","writeResults","i","sourceId","outputId","node","key"],"sources":["../../../src/lib/utils/conversion-dump.ts"],"sourcesContent":["import {DUMP_FILE_SUFFIX} from '../../constants';\nimport {removeFile, writeFile} from './file-utils';\nimport {join} from 'path';\n\nexport type ConversionDumpOptions = {\n inputUrl: string;\n outputPath: string;\n tilesetName: string;\n maxDepth: number;\n slpk: boolean;\n egmFilePath: string;\n token: string;\n draco: boolean;\n mergeMaterials: boolean;\n generateTextures: boolean;\n generateBoundingVolumes: boolean;\n metadataClass: string;\n analyze: boolean;\n};\n\ntype NodeDoneStatus = {\n nodeId: number;\n done: Record<string, boolean
+
{"version":3,"file":"conversion-dump.js","names":["DUMP_FILE_SUFFIX","removeFile","writeFile","join","ConversionDump","constructor","options","tilesConverted","createDumpFile","tilesetName","slpk","egmFilePath","inputUrl","outputPath","draco","maxDepth","token","generateTextures","generateBoundingVolumes","mergeMaterials","metadataClass","analyze","JSON","stringify","error","console","log","updateDumpFile","_this$options","deleteDumpFile","_this$options2","getRecord","fileName","setRecord","object","addNode","filename","nodeId","nodes","push","done","progress","length","updateDoneStatus","resourceType","value","_this$tilesConverted$","nodeDump","find","element","updateConvertedTilesDump","changedRecords","writeResults","i","sourceId","outputId","node","key"],"sources":["../../../src/lib/utils/conversion-dump.ts"],"sourcesContent":["import {DUMP_FILE_SUFFIX} from '../../constants';\nimport {removeFile, writeFile} from './file-utils';\nimport {join} from 'path';\n\nexport type ConversionDumpOptions = {\n inputUrl: string;\n outputPath: string;\n tilesetName: string;\n maxDepth: number;\n slpk: boolean;\n egmFilePath: string;\n token: string;\n draco: boolean;\n mergeMaterials: boolean;\n generateTextures: boolean;\n generateBoundingVolumes: boolean;\n metadataClass: string;\n analyze: boolean;\n};\n\ntype NodeDoneStatus = {\n nodeId: number;\n done: boolean;\n progress: Record<string, boolean>;\n};\n\ntype TilesConverted = {\n nodes: NodeDoneStatus[];\n};\n\nexport class ConversionDump {\n /** Conversion options */\n private options?: ConversionDumpOptions;\n /** Tiles conversion progress status map */\n tilesConverted: Record<string, TilesConverted>;\n\n constructor() {\n this.tilesConverted = {};\n }\n\n /**\n * Create a dump file with convertion options\n * @param options - converter options\n */\n async createDumpFile(options: ConversionDumpOptions): Promise<void> {\n const {\n tilesetName,\n slpk,\n egmFilePath,\n inputUrl,\n outputPath,\n draco = true,\n maxDepth,\n token,\n generateTextures,\n generateBoundingVolumes,\n mergeMaterials = true,\n metadataClass,\n analyze = false\n } = options;\n this.options = {\n tilesetName,\n slpk,\n egmFilePath,\n inputUrl,\n outputPath,\n draco,\n maxDepth,\n token,\n generateTextures,\n generateBoundingVolumes,\n mergeMaterials,\n metadataClass,\n analyze\n };\n\n try {\n await writeFile(\n options.outputPath,\n JSON.stringify({options: this.options}),\n `${options.tilesetName}${DUMP_FILE_SUFFIX}`\n );\n } catch (error) {\n console.log(\"Can't create dump file\", error);\n }\n }\n\n /**\n * Update conversion status in the dump file\n */\n private async updateDumpFile(): Promise<void> {\n if (this.options?.outputPath && this.options.tilesetName) {\n try {\n await writeFile(\n this.options.outputPath,\n JSON.stringify({\n options: this.options,\n tilesConverted: this.tilesConverted\n }),\n `${this.options.tilesetName}${DUMP_FILE_SUFFIX}`\n );\n } catch (error) {\n console.log(\"Can't update dump file\", error);\n }\n }\n }\n\n /**\n * Delete a dump file\n */\n async deleteDumpFile(): Promise<void> {\n if (this.options?.outputPath && this.options.tilesetName) {\n await removeFile(\n join(this.options.outputPath, `${this.options.tilesetName}${DUMP_FILE_SUFFIX}`)\n );\n }\n }\n\n /**\n * Get record from the tilesConverted Map\n * @param fileName - source filename\n * @returns existing object from the tilesConverted Map\n */\n private getRecord(fileName: string) {\n return this.tilesConverted[fileName];\n }\n\n /**\n * Set a record for the dump file\n * 
@param fileName - key - source filename\n * @param object - value\n */\n private setRecord(fileName: string, object: any) {\n this.tilesConverted[fileName] = object;\n }\n\n /**\n * Add a node into the dump file for the source file record\n * @param fileName - source filename\n * @param nodeId - nodeId of the node\n */\n async addNode(filename: string, nodeId: number) {\n const {nodes} = this.getRecord(filename) || {nodes: []};\n nodes.push({nodeId, done: false, progress: {}});\n if (nodes.length === 1) {\n this.setRecord(filename, {nodes});\n }\n await this.updateDumpFile();\n }\n\n /**\n * Update done status object for the writing resources\n * @param fileName - key - source filename\n * @param nodeId - nodeId for the source filename\n * @param resourceType - resource type to update status\n * @param value - value\n */\n updateDoneStatus(filename: string, nodeId: number, resourceType: string, value: boolean) {\n const nodeDump = this.tilesConverted[filename]?.nodes.find(\n (element) => element.nodeId === nodeId\n );\n if (nodeDump) {\n nodeDump.progress[resourceType] = value;\n if (!value) {\n nodeDump.done = false;\n }\n }\n }\n\n /**\n * Update dump file according to writing results\n * @param changedRecords - array of parameters ids for the written resources\n * @param writeResults - array of writing resource files results\n */\n async updateConvertedTilesDump(\n changedRecords: {outputId?: number; sourceId?: string; resourceType?: string}[],\n writeResults: PromiseSettledResult<string | null>[]\n ) {\n for (let i = 0; i < changedRecords.length; i++) {\n if (changedRecords[i] && 'value' in writeResults[i]) {\n const {sourceId, resourceType, outputId} = changedRecords[i];\n if (!sourceId || !resourceType || !outputId) continue;\n for (const node of this.tilesConverted[sourceId].nodes) {\n if (node.nodeId === outputId) {\n node.progress[resourceType] = true;\n\n let done = false;\n for (const key in node.progress) {\n done = node.progress[key];\n if (!done) break;\n }\n node.done = done;\n if (node.done) {\n node.progress = {};\n }\n break;\n }\n }\n }\n }\n await this.updateDumpFile();\n 
}\n}\n"],"mappings":"SAAQA,gBAAgB;AAAA,SAChBC,UAAU,EAAEC,SAAS;AAC7B,SAAQC,IAAI,QAAO,MAAM;AA4BzB,OAAO,MAAMC,cAAc,CAAC;EAM1BC,WAAWA,CAAA,EAAG;IAAA,KAJNC,OAAO;IAAA,KAEfC,cAAc;IAGZ,IAAI,CAACA,cAAc,GAAG,CAAC,CAAC;EAC1B;EAMA,MAAMC,cAAcA,CAACF,OAA8B,EAAiB;IAClE,MAAM;MACJG,WAAW;MACXC,IAAI;MACJC,WAAW;MACXC,QAAQ;MACRC,UAAU;MACVC,KAAK,GAAG,IAAI;MACZC,QAAQ;MACRC,KAAK;MACLC,gBAAgB;MAChBC,uBAAuB;MACvBC,cAAc,GAAG,IAAI;MACrBC,aAAa;MACbC,OAAO,GAAG;IACZ,CAAC,GAAGf,OAAO;IACX,IAAI,CAACA,OAAO,GAAG;MACbG,WAAW;MACXC,IAAI;MACJC,WAAW;MACXC,QAAQ;MACRC,UAAU;MACVC,KAAK;MACLC,QAAQ;MACRC,KAAK;MACLC,gBAAgB;MAChBC,uBAAuB;MACvBC,cAAc;MACdC,aAAa;MACbC;IACF,CAAC;IAED,IAAI;MACF,MAAMnB,SAAS,CACbI,OAAO,CAACO,UAAU,EAClBS,IAAI,CAACC,SAAS,CAAC;QAACjB,OAAO,EAAE,IAAI,CAACA;MAAO,CAAC,CAAC,EACtC,GAAEA,OAAO,CAACG,WAAY,GAAET,gBAAiB,EAC5C,CAAC;IACH,CAAC,CAAC,OAAOwB,KAAK,EAAE;MACdC,OAAO,CAACC,GAAG,CAAC,wBAAwB,EAAEF,KAAK,CAAC;IAC9C;EACF;EAKA,MAAcG,cAAcA,CAAA,EAAkB;IAAA,IAAAC,aAAA;IAC5C,IAAI,CAAAA,aAAA,OAAI,CAACtB,OAAO,cAAAsB,aAAA,eAAZA,aAAA,CAAcf,UAAU,IAAI,IAAI,CAACP,OAAO,CAACG,WAAW,EAAE;MACxD,IAAI;QACF,MAAMP,SAAS,CACb,IAAI,CAACI,OAAO,CAACO,UAAU,EACvBS,IAAI,CAACC,SAAS,CAAC;UACbjB,OAAO,EAAE,IAAI,CAACA,OAAO;UACrBC,cAAc,EAAE,IAAI,CAACA;QACvB,CAAC,CAAC,EACD,GAAE,IAAI,CAACD,OAAO,CAACG,WAAY,GAAET,gBAAiB,EACjD,CAAC;MACH,CAAC,CAAC,OAAOwB,KAAK,EAAE;QACdC,OAAO,CAACC,GAAG,CAAC,wBAAwB,EAAEF,KAAK,CAAC;MAC9C;IACF;EACF;EAKA,MAAMK,cAAcA,CAAA,EAAkB;IAAA,IAAAC,cAAA;IACpC,IAAI,CAAAA,cAAA,OAAI,CAACxB,OAAO,cAAAwB,cAAA,eAAZA,cAAA,CAAcjB,UAAU,IAAI,IAAI,CAACP,OAAO,CAACG,WAAW,EAAE;MACxD,MAAMR,UAAU,CACdE,IAAI,CAAC,IAAI,CAACG,OAAO,CAACO,UAAU,EAAG,GAAE,IAAI,CAACP,OAAO,CAACG,WAAY,GAAET,gBAAiB,EAAC,CAChF,CAAC;IACH;EACF;EAOQ+B,SAASA,CAACC,QAAgB,EAAE;IAClC,OAAO,IAAI,CAACzB,cAAc,CAACyB,QAAQ,CAAC;EACtC;EAOQC,SAASA,CAACD,QAAgB,EAAEE,MAAW,EAAE;IAC/C,IAAI,CAAC3B,cAAc,CAACyB,QAAQ,CAAC,GAAGE,MAAM;EACxC;EAOA,MAAMC,OAAOA,CAACC,QAAgB,EAAEC,MAAc,EAAE;IAC9C,MAAM;MAACC;IAAK,CAAC,GAAG,IAAI,CAACP,SAAS,CAACK,QAAQ,CAAC,IAAI;MAACE,KAAK,EAAE;IAAE,CAAC;IACvDA,KAAK,CAACC,IAAI,CAAC;MAACF,MAAM;MAAEG,IAAI,EAAE,KAAK;MAAEC,QAAQ,EAAE,CAAC;IAAC,CAAC,CAAC;IAC/C,IAAIH,KAAK,CAACI,MAAM,KAAK,CAAC,EAAE;MACtB,IAAI,CAACT,SAAS,CAACG,QAAQ,EAAE;QAACE;MAAK,CAAC,CAAC;IACnC;IACA,MAAM,IAAI,CAACX,cAAc,CAAC,CAAC;EAC7B;EASAgB,gBAAgBA,CAACP,QAAgB,EAAEC,MAAc,EAAEO,YAAoB,EAAEC,KAAc,EAAE;IAAA,IAAAC,qBAAA;IACvF,MAAMC,QAAQ,IAAAD,qBAAA,GAAG,IAAI,CAACvC,cAAc,CAAC6B,QAAQ,CAAC,cAAAU,qBAAA,uBAA7BA,qBAAA,CAA+BR,KAAK,CAACU,IAAI,CACvDC,OAAO,IAAKA,OAAO,CAACZ,MAAM,KAAKA,MAClC,CAAC;IACD,IAAIU,QAAQ,EAAE;MACZA,QAAQ,CAACN,QAAQ,CAACG,YAAY,CAAC,GAAGC,KAAK;MACvC,IAAI,CAACA,KAAK,EAAE;QACVE,QAAQ,CAACP,IAAI,GAAG,KAAK;MACvB;IACF;EACF;EAOA,MAAMU,wBAAwBA,CAC5BC,cAA+E,EAC/EC,YAAmD,EACnD;IACA,KAAK,IAAIC,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGF,cAAc,CAACT,MAAM,EAAEW,CAAC,EAAE,EAAE;MAC9C,IAAIF,cAAc,CAACE,CAAC,CAAC,IAAI,OAAO,IAAID,YAAY,CAACC,CAAC,CAAC,EAAE;QACnD,MAAM;UAACC,QAAQ;UAAEV,YAAY;UAAEW;QAAQ,CAAC,GAAGJ,cAAc,CAACE,CAAC,CAAC;QAC5D,IAAI,CAACC,QAAQ,IAAI,CAACV,YAAY,IAAI,CAACW,QAAQ,EAAE;QAC7C,KAAK,MAAMC,IAAI,IAAI,IAAI,CAACjD,cAAc,CAAC+C,QAAQ,CAAC,CAAChB,KAAK,EAAE;UACtD,IAAIkB,IAAI,CAACnB,MAAM,KAAKkB,QAAQ,EAAE;YAC5BC,IAAI,CAACf,QAAQ,CAACG,YAAY,CAAC,GAAG,IAAI;YAElC,IAAIJ,IAAI,GAAG,KAAK;YAChB,KAAK,MAAMiB,GAAG,IAAID,IAAI,CAACf,QAAQ,EAAE;cAC/BD,IAAI,GAAGgB,IAAI,CAACf,QAAQ,CAACgB,GAAG,CAAC;cACzB,IAAI,CAACjB,IAAI,EAAE;YACb;YACAgB,IAAI,CAAChB,IAAI,GAAGA,IAAI;YAChB,IAAIgB,IAAI,CAAChB,IAAI,EAAE;cACbgB,IAAI,CAACf,QAAQ,GAAG,CAAC,CAAC;YACpB;YACA;UACF;QACF;MACF;IACF;IACA,MAAM,IAAI,CAACd,cAAc,CAAC,CAAC;EAC7B;AACF"}
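A rough sketch of how the reworked dump bookkeeping behaves, based only on the compiled code and typings above; the file name, node id and resource names are illustrative:

import {ConversionDump} from './lib/utils/conversion-dump';

async function trackNode(): Promise<void> {
  const dump = new ConversionDump();
  // Every node starts as {nodeId, done: false, progress: {}}.
  await dump.addNode('Tile_0.b3dm', 1);
  // Per-resource write status lands in `progress`; a failed write keeps `done` false.
  dump.updateDoneStatus('Tile_0.b3dm', 1, 'geometry', true);
  dump.updateDoneStatus('Tile_0.b3dm', 1, 'texture', false);
  // When the settled write results arrive, the matching resource flips to true;
  // once every tracked resource is true the node becomes done and `progress` is reset.
  const writeResults = await Promise.allSettled([Promise.resolve('texture.bin')]);
  await dump.updateConvertedTilesDump(
    [{sourceId: 'Tile_0.b3dm', outputId: 1, resourceType: 'texture'}],
    writeResults
  );
}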
package/dist/pgm-loader.js
CHANGED

@@ -1,5 +1,5 @@
  import { Geoid, parsePGM } from '@math.gl/geoid';
- const VERSION = typeof "4.1.0-alpha.10" !== 'undefined' ? "4.1.0-alpha.10" : 'latest';
+ const VERSION = typeof "4.1.0-alpha.11" !== 'undefined' ? "4.1.0-alpha.11" : 'latest';
  export { Geoid };
  export const PGMLoader = {
    name: 'PGM - Netpbm grayscale image format',
package/package.json
CHANGED

@@ -1,6 +1,6 @@
  {
    "name": "@loaders.gl/tile-converter",
-   "version": "4.1.0-alpha.10",
+   "version": "4.1.0-alpha.11",
    "description": "Converter",
    "license": "MIT",
    "type": "module",
@@ -52,18 +52,18 @@
      "build-i3s-server-bundle": "esbuild src/i3s-server/bin/www.ts --outfile=dist/i3s-server/bin/i3s-server.min.cjs --platform=node --target=esnext,node14 --minify --bundle --define:__VERSION__=\\\"$npm_package_version\\\""
    },
    "dependencies": {
-     "@loaders.gl/3d-tiles": "4.1.0-alpha.10",
-     "@loaders.gl/crypto": "4.1.0-alpha.10",
-     "@loaders.gl/draco": "4.1.0-alpha.10",
-     "@loaders.gl/gltf": "4.1.0-alpha.10",
-     "@loaders.gl/i3s": "4.1.0-alpha.10",
-     "@loaders.gl/images": "4.1.0-alpha.10",
-     "@loaders.gl/loader-utils": "4.1.0-alpha.10",
-     "@loaders.gl/polyfills": "4.1.0-alpha.10",
-     "@loaders.gl/textures": "4.1.0-alpha.10",
-     "@loaders.gl/tiles": "4.1.0-alpha.10",
-     "@loaders.gl/worker-utils": "4.1.0-alpha.10",
-     "@loaders.gl/zip": "4.1.0-alpha.10",
+     "@loaders.gl/3d-tiles": "4.1.0-alpha.11",
+     "@loaders.gl/crypto": "4.1.0-alpha.11",
+     "@loaders.gl/draco": "4.1.0-alpha.11",
+     "@loaders.gl/gltf": "4.1.0-alpha.11",
+     "@loaders.gl/i3s": "4.1.0-alpha.11",
+     "@loaders.gl/images": "4.1.0-alpha.11",
+     "@loaders.gl/loader-utils": "4.1.0-alpha.11",
+     "@loaders.gl/polyfills": "4.1.0-alpha.11",
+     "@loaders.gl/textures": "4.1.0-alpha.11",
+     "@loaders.gl/tiles": "4.1.0-alpha.11",
+     "@loaders.gl/worker-utils": "4.1.0-alpha.11",
+     "@loaders.gl/zip": "4.1.0-alpha.11",
      "@math.gl/core": "^4.0.0",
      "@math.gl/culling": "^4.0.0",
      "@math.gl/geoid": "^4.0.0",
@@ -87,7 +87,7 @@
      "join-images": "^1.1.3",
      "sharp": "^0.31.3"
    },
-   "gitHead": "
+   "gitHead": "5d3e23bf93762b48c8c1d6d926ede7a97fe43ab0",
    "devDependencies": {
      "@types/express": "^4.17.17",
      "@types/node": "^20.4.2"
package/src/3d-tiles-converter/helpers/b3dm-converter.ts
CHANGED

@@ -9,6 +9,9 @@ import {generateSyntheticIndices} from '../../lib/utils/geometry-utils';

  const Z_UP_TO_Y_UP_MATRIX = new Matrix4([1, 0, 0, 0, 0, 0, -1, 0, 0, 1, 0, 0, 0, 0, 0, 1]);
  const scratchVector = new Vector3();
+ const KHR_MATERIALS_UNLIT = 'KHR_materials_unlit';
+ const METALLIC_FACTOR_DEFAULT = 1.0;
+ const ROUGHNESS_FACTOR_DEFAULT = 1.0;

  export type I3SAttributesData = {
    tileContent: I3STileContent;
@@ -60,6 +63,22 @@ export default class B3dmConverter {
    const gltfBuilder = new GLTFScenegraph();

    const textureIndex = await this._addI3sTextureToGLTF(tileContent, textureFormat, gltfBuilder);
+
+   // Add KHR_MATERIALS_UNLIT extension in the following cases:
+   // - metallicFactor or roughnessFactor are set to default values
+   // - metallicFactor or roughnessFactor are not set
+   const pbrMetallicRoughness = material?.pbrMetallicRoughness;
+   if (
+     pbrMetallicRoughness &&
+     (pbrMetallicRoughness.metallicFactor === undefined ||
+       pbrMetallicRoughness.metallicFactor === METALLIC_FACTOR_DEFAULT) &&
+     (pbrMetallicRoughness.roughnessFactor === undefined ||
+       pbrMetallicRoughness.roughnessFactor === ROUGHNESS_FACTOR_DEFAULT)
+   ) {
+     gltfBuilder.addObjectExtension(material, KHR_MATERIALS_UNLIT, {});
+     gltfBuilder.addExtension(KHR_MATERIALS_UNLIT);
+   }
+
    const pbrMaterialInfo = this._convertI3sMaterialToGLTFMaterial(material, textureIndex);
    const materialIndex = gltfBuilder.addMaterial(pbrMaterialInfo);

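The added block reduces to a single predicate; restated as a standalone helper for readability (shouldMarkUnlit and its parameter type are illustrative, not part of the converter; 1.0 is the glTF default for both factors):

type PbrMetallicRoughness = {metallicFactor?: number; roughnessFactor?: number};

// KHR_materials_unlit is added when pbrMetallicRoughness exists and both factors
// are either unset or left at the glTF default of 1.0.
function shouldMarkUnlit(pbrMetallicRoughness?: PbrMetallicRoughness): boolean {
  if (!pbrMetallicRoughness) {
    return false;
  }
  const metallicIsDefault =
    pbrMetallicRoughness.metallicFactor === undefined ||
    pbrMetallicRoughness.metallicFactor === 1.0;
  const roughnessIsDefault =
    pbrMetallicRoughness.roughnessFactor === undefined ||
    pbrMetallicRoughness.roughnessFactor === 1.0;
  return metallicIsDefault && roughnessIsDefault;
}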
package/src/converter-cli.ts
CHANGED

@@ -11,7 +11,7 @@ import {
    getURLValue,
    validateOptionsWithEqual
  } from './lib/utils/cli-utils';
- import {addOneFile, composeHashFile} from '@loaders.gl/zip';
+ import {addOneFile, composeHashFile, makeZipCDHeaderIterator} from '@loaders.gl/zip';
  import {FileHandleFile} from '@loaders.gl/loader-utils';
  import {copyFile} from 'node:fs/promises';

@@ -133,7 +133,7 @@ async function main() {
    if (finalPath !== validatedOptions.tileset) {
      await copyFile(validatedOptions.tileset, finalPath);
    }
-   const hashTable = await composeHashFile(new FileHandleFile(finalPath));
+   const hashTable = await composeHashFile(makeZipCDHeaderIterator(new FileHandleFile(finalPath)));
    await addOneFile(finalPath, hashTable, '@specialIndexFileHASH128@');

    return;
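The updated call chain reads the archive's central directory and feeds it to the hash builder; condensed into one illustrative helper (appendHashIndex is not part of the CLI; the imports and the special entry name are taken from the diff):

import {addOneFile, composeHashFile, makeZipCDHeaderIterator} from '@loaders.gl/zip';
import {FileHandleFile} from '@loaders.gl/loader-utils';

// Builds the SLPK hash index from the zip central directory and appends it
// under the special entry name used by the I3S tooling.
async function appendHashIndex(slpkPath: string): Promise<void> {
  const hashTable = await composeHashFile(makeZipCDHeaderIterator(new FileHandleFile(slpkPath)));
  await addOneFile(slpkPath, hashTable, '@specialIndexFileHASH128@');
}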
package/src/i3s-converter/i3s-converter.ts
CHANGED

@@ -30,12 +30,7 @@ import md5 from 'md5';

  import NodePages from './helpers/node-pages';
  import {writeFile, removeDir, writeFileForSlpk, removeFile} from '../lib/utils/file-utils';
- import {
-   compressFileWithGzip,
-   compressWithChildProcess
-   // generateHash128FromZip,
-   // addFileToZip
- } from '../lib/utils/compress-util';
+ import {compressFileWithGzip} from '../lib/utils/compress-util';
  import {calculateFilesSize, timeConverter} from '../lib/utils/statistic-utills';
  import convertB3dmToI3sGeometry, {getPropertyTable} from './helpers/geometry-converter';
  import {
@@ -51,7 +46,7 @@ import {GEOMETRY_DEFINITION as geometryDefinitionTemlate} from './json-templates
  import {SHARED_RESOURCES as sharedResourcesTemplate} from './json-templates/shared-resources';
  import {validateNodeBoundingVolumes} from './helpers/node-debug';
  import {KTX2BasisWriterWorker} from '@loaders.gl/textures';
- import {
+ import {LoaderWithParser} from '@loaders.gl/loader-utils';
  import {I3SMaterialDefinition, TextureSetDefinitionFormats} from '@loaders.gl/i3s';
  import {ImageWriter} from '@loaders.gl/images';
  import {GLTFImagePostprocessed} from '@loaders.gl/gltf';
@@ -82,7 +77,7 @@ import {createBoundingVolume} from '@loaders.gl/tiles';
  import {TraversalConversionProps, traverseDatasetWith} from './helpers/tileset-traversal';
  import {analyzeTileContent, mergePreprocessData} from './helpers/preprocess-3d-tiles';
  import {Progress} from './helpers/progress';
- import {
+ import {composeHashFile, createZip} from '@loaders.gl/zip';
  import {ConversionDump, ConversionDumpOptions} from '../lib/utils/conversion-dump';

  const ION_DEFAULT_TOKEN = process.env?.IonToken;
@@ -567,35 +562,12 @@ export default class I3SConverter {
    if (this.options.slpk) {
      const slpkTilesetPath = join(tilesetPath, 'SceneServer', 'layers', '0');
      const slpkFileName = `${tilesetPath}.slpk`;
-     await compressWithChildProcess(
-       slpkTilesetPath,
-       slpkFileName,
-       0,
-       '.',
-       this.options.sevenZipExe
-     );

-
-
-
-
-
-     // try {
-     //   await generateHash128FromZip(slpkFileName, fileHash128Path);
-     //   await addFileToZip(
-     //     tilesetPath,
-     //     '@specialIndexFileHASH128@',
-     //     slpkFileName,
-     //     this.options.sevenZipExe
-     //   );
-     // } catch (error) {
-     //   if (error.code === FS_FILE_TOO_LARGE) {
-     //     console.warn(`${slpkFileName} file is too big to generate a hash`); // eslint-disable-line
-     //   } else {
-     //     console.error(error); // eslint-disable-line
-     //   }
-     // }
-     // All converted files are contained in slpk now they can be deleted
+     await createZip(slpkTilesetPath, slpkFileName, async (fileList) => ({
+       path: '@specialIndexFileHASH128@',
+       file: await composeHashFile(fileList)
+     }));
+
      try {
        await removeDir(tilesetPath);
      } catch (e) {