@loaders.gl/tile-converter 4.1.0-alpha.9 → 4.2.0-alpha.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
- package/dist/3d-tiles-converter/3d-tiles-converter.d.ts.map +1 -1
- package/dist/3d-tiles-converter/3d-tiles-converter.js +4 -4
- package/dist/3d-tiles-converter/3d-tiles-converter.js.map +1 -1
- package/dist/3d-tiles-converter/helpers/b3dm-converter.d.ts.map +1 -1
- package/dist/3d-tiles-converter/helpers/b3dm-converter.js +8 -0
- package/dist/3d-tiles-converter/helpers/b3dm-converter.js.map +1 -1
- package/dist/3d-tiles-converter/json-templates/tileset.d.ts.map +1 -1
- package/dist/3d-tiles-converter/json-templates/tileset.js +3 -0
- package/dist/3d-tiles-converter/json-templates/tileset.js.map +1 -1
- package/dist/constants.d.ts +1 -0
- package/dist/constants.d.ts.map +1 -1
- package/dist/constants.js +1 -0
- package/dist/constants.js.map +1 -1
- package/dist/converter-cli.js +3 -3
- package/dist/converter-cli.js.map +1 -1
- package/dist/converter.min.cjs +131 -220
- package/dist/deps-installer/deps-installer.js +1 -1
- package/dist/i3s-converter/helpers/attribute-metadata-info.d.ts +10 -0
- package/dist/i3s-converter/helpers/attribute-metadata-info.d.ts.map +1 -1
- package/dist/i3s-converter/helpers/attribute-metadata-info.js +5 -0
- package/dist/i3s-converter/helpers/attribute-metadata-info.js.map +1 -1
- package/dist/i3s-converter/helpers/node-index-document.d.ts +2 -1
- package/dist/i3s-converter/helpers/node-index-document.d.ts.map +1 -1
- package/dist/i3s-converter/helpers/node-index-document.js +6 -8
- package/dist/i3s-converter/helpers/node-index-document.js.map +1 -1
- package/dist/i3s-converter/helpers/progress.js +1 -1
- package/dist/i3s-converter/helpers/progress.js.map +1 -1
- package/dist/i3s-converter/i3s-converter.d.ts +32 -0
- package/dist/i3s-converter/i3s-converter.d.ts.map +1 -1
- package/dist/i3s-converter/i3s-converter.js +195 -47
- package/dist/i3s-converter/i3s-converter.js.map +1 -1
- package/dist/i3s-converter/types.d.ts +7 -0
- package/dist/i3s-converter/types.d.ts.map +1 -1
- package/dist/i3s-converter/types.js +8 -0
- package/dist/i3s-converter/types.js.map +1 -1
- package/dist/i3s-server/bin/i3s-server.min.cjs +76 -76
- package/dist/index.cjs +638 -139
- package/dist/lib/utils/compress-util.d.ts +0 -37
- package/dist/lib/utils/compress-util.d.ts.map +1 -1
- package/dist/lib/utils/compress-util.js +1 -149
- package/dist/lib/utils/compress-util.js.map +1 -1
- package/dist/lib/utils/conversion-dump.d.ts +131 -0
- package/dist/lib/utils/conversion-dump.d.ts.map +1 -0
- package/dist/lib/utils/conversion-dump.js +191 -0
- package/dist/lib/utils/conversion-dump.js.map +1 -0
- package/dist/lib/utils/statistic-utills.js +1 -1
- package/dist/lib/utils/statistic-utills.js.map +1 -1
- package/dist/lib/utils/write-queue.d.ts +6 -1
- package/dist/lib/utils/write-queue.d.ts.map +1 -1
- package/dist/lib/utils/write-queue.js +15 -3
- package/dist/lib/utils/write-queue.js.map +1 -1
- package/dist/pgm-loader.js +1 -1
- package/dist/slpk-extractor.min.cjs +31 -31
- package/package.json +16 -16
- package/src/3d-tiles-converter/3d-tiles-converter.ts +5 -4
- package/src/3d-tiles-converter/helpers/b3dm-converter.ts +19 -0
- package/src/3d-tiles-converter/json-templates/tileset.ts +3 -0
- package/src/constants.ts +1 -0
- package/src/converter-cli.ts +3 -3
- package/src/i3s-converter/helpers/attribute-metadata-info.ts +16 -0
- package/src/i3s-converter/helpers/node-index-document.ts +18 -8
- package/src/i3s-converter/helpers/progress.ts +1 -1
- package/src/i3s-converter/i3s-converter.ts +385 -93
- package/src/i3s-converter/types.ts +8 -0
- package/src/lib/utils/compress-util.ts +1 -264
- package/src/lib/utils/conversion-dump.ts +325 -0
- package/src/lib/utils/statistic-utills.ts +1 -1
- package/src/lib/utils/write-queue.ts +15 -2
package/src/i3s-converter/types.ts

@@ -241,3 +241,11 @@ export const AttributeType = {
   /** Integer data type name for feature attributes */
   SHORT_INT_TYPE: 'Int32'
 } as const;
+
+export enum ResourceType {
+  ATTRIBUTES = 'ATTRIBUTES',
+  DRACO_GEOMETRY = 'DRACO_GEOMETRY',
+  GEOMETRY = 'GEOMETRY',
+  SHARED = 'SHARED',
+  TEXTURE = 'TEXTURE'
+}
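The hunk above adds the `ResourceType` enum, naming the per-node I3S resources the converter writes. Read together with the `conversion-dump.ts` and `write-queue.ts` changes below, these string values appear to key the per-node `progress` map. A minimal sketch of that pattern, using the in-package import path (an assumption, since the package does not necessarily re-export the enum):

```ts
// Minimal sketch: per-node resource write progress keyed by ResourceType,
// mirroring the NodeDoneStatus.progress shape added in conversion-dump.ts.
import {ResourceType} from './i3s-converter/types';

const progress: Record<string, boolean> = {};
progress[ResourceType.GEOMETRY] = true; // geometry resource written
progress[ResourceType.TEXTURE] = false; // texture write still pending

// A node counts as done once every tracked resource flag is true,
// which is the loop updateConvertedTilesDump performs below.
const done = Object.values(progress).every(Boolean); // false here
```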
package/src/lib/utils/compress-util.ts

@@ -1,13 +1,5 @@
 import {createGzip} from 'zlib';
-import {join} from 'path';
-import {promises as fs, createReadStream, createWriteStream} from 'fs';
-import archiver from 'archiver';
-import {removeFile} from './file-utils';
-import {ChildProcessProxy} from '@loaders.gl/worker-utils';
-import JSZip from 'jszip';
-import {MD5Hash} from '@loaders.gl/crypto';
-import crypt from 'crypt';
-import {getAbsoluteFilePath} from './file-utils';
+import {createReadStream, createWriteStream} from 'fs';
 
 /**
  * Compress file to gzip file
@@ -33,258 +25,3 @@ export function compressFileWithGzip(pathFile: string): Promise<string> {
     input.pipe(gzip).pipe(output);
   });
 }
-
-/**
- * Compress files from map into slpk file
- *
- * @param fileMap - map with file paths (key: output path, value: input path)
- * @param outputFile - output slpk file
- * @param level - compression level
- */
-export async function compressFilesWithZip(
-  fileMap: {[key: string]: string},
-  outputFile: string,
-  level: number = 0
-) {
-  // Before creating a new file, we need to delete the old file
-  try {
-    await removeFile(outputFile);
-  } catch (e) {
-    // Do nothing if old file doesn't exist
-  }
-
-  const output = createWriteStream(outputFile);
-  const archive = archiver('zip', {
-    zlib: {level} // Sets the compression level.
-  });
-
-  return new Promise(async (resolve, reject) => {
-    // listen for all archive data to be written
-    // 'close' event is fired only when a file descriptor is involved
-    output.on('close', function () {
-      console.log(`${outputFile} saved.`); // eslint-disable-line no-undef,no-console
-      console.log(`${archive.pointer()} total bytes`); // eslint-disable-line no-undef,no-console
-      resolve(null);
-    });
-
-    // This event is fired when the data source is drained no matter what was the data source.
-    // It is not part of this library but rather from the NodeJS Stream API.
-    // @see: https://nodejs.org/api/stream.html#stream_event_end
-    output.on('end', function () {
-      console.log('Data has been drained'); // eslint-disable-line no-undef,no-console
-      resolve(null);
-    });
-
-    // good practice to catch warnings (ie stat failures and other non-blocking errors)
-    archive.on('warning', function (err) {
-      console.log(err); // eslint-disable-line no-undef,no-console
-      reject(err);
-    });
-
-    // good practice to catch this error explicitly
-    archive.on('error', function (err) {
-      reject(err);
-    });
-
-    // pipe archive data to the file
-    archive.pipe(output);
-
-    for (const subFileName in fileMap) {
-      const subFileData = fileMap[subFileName];
-      await appendFileToArchive(archive, subFileName, subFileData);
-    }
-
-    // finalize the archive (ie we are done appending files but streams have to finish yet)
-    archive.finalize();
-  });
-}
-
-/**
- * Compress files using external tool 'zip'/'7z'
- *
- * @param inputFolder - folder to archive - for cwd option
- * @param outputFile - output slpk file
- * @param level - compression level
- * @param inputFiles - input files path to pass to the executable as option
- * @param sevenZipExe - path to 7z.exe executable
- */
-export async function compressWithChildProcess(
-  inputFolder: string,
-  outputFile: string,
-  level: number,
-  inputFiles: string,
-  sevenZipExe: string
-) {
-  // eslint-disable-next-line no-undef
-  if (process.platform === 'win32') {
-    await compressWithChildProcessWindows(inputFolder, outputFile, level, inputFiles, sevenZipExe);
-  } else {
-    await compressWithChildProcessUnix(inputFolder, outputFile, level, inputFiles);
-  }
-}
-
-/**
- * Compress files using external linux tool 'zip'
- *
- * @param inputFolder - folder to archive - for cwd option
- * @param outputFile - output slpk file
- * @param level - compression level
- * @param inputFiles - input files path to pass to the executable as option
- */
-async function compressWithChildProcessUnix(
-  inputFolder: string,
-  outputFile: string,
-  level: number = 0,
-  inputFiles: string = '.'
-) {
-  const fullOutputFile = getAbsoluteFilePath(outputFile);
-  const args = [`-${level}`, '-r', fullOutputFile, inputFiles];
-  const childProcess = new ChildProcessProxy();
-  await childProcess.start({
-    command: 'zip',
-    arguments: args,
-    spawn: {
-      cwd: inputFolder
-    },
-    wait: 0
-  });
-}
-
-/**
- * Compress files using windows external tool '7z'
- *
- * @param inputFolder - folder to archive - for cwd option
- * @param outputFile - output slpk file
- * @param level - compression level
- * @param inputFiles - input files path to pass to the executable as option
- * @param sevenZipExe - path to 7z.exe executable
- */
-async function compressWithChildProcessWindows(
-  inputFolder: string,
-  outputFile: string,
-  level: number = 0,
-  inputFiles: string = join('.', '*'),
-  sevenZipExe: string
-) {
-  // Workaround for @listfile issue. In 7z.exe @-leading files are handled as listfiles
-  // https://sevenzip.osdn.jp/chm/cmdline/syntax.htm
-  if (inputFiles[0] === '@') {
-    inputFiles = `*${inputFiles.substr(1)}`;
-  }
-
-  const fullOutputFile = getAbsoluteFilePath(outputFile);
-  const args = ['a', '-tzip', `-mx=${level}`, fullOutputFile, inputFiles];
-  const childProcess = new ChildProcessProxy();
-  await childProcess.start({
-    command: sevenZipExe,
-    arguments: args,
-    spawn: {
-      cwd: `${inputFolder}`
-    },
-    wait: 0
-  });
-}
-
-/**
- * Generate hash file from zip archive
- * https://github.com/Esri/i3s-spec/blob/master/docs/1.7/slpk_hashtable.cmn.md
- *
- * @param inputZipFile
- * @param outputFile
- */
-export async function generateHash128FromZip(inputZipFile: string, outputFile: string) {
-  const input = await fs.readFile(inputZipFile);
-  const zip = await JSZip.loadAsync(input);
-  const hashTable: {key: string; value: string}[] = [];
-  const zipFiles = zip.files;
-  for (const relativePath in zipFiles) {
-    const zipEntry = zipFiles[relativePath];
-    // Had to use a workaround because the correct string is getting the wrong data
-    // const content = await zipEntry.async('nodebuffer');
-    // _data isn't described in the interface, so lint thought it was wrong
-    const _data = '_data';
-    const content = zipEntry[_data].compressedContent;
-    if (zipEntry.dir) continue; // eslint-disable-line no-continue
-    // eslint-disable-next-line no-undef
-    const hash = await new MD5Hash().hash(Buffer.from(relativePath.toLowerCase()), 'base64');
-    // eslint-disable-next-line no-undef
-    hashTable.push({key: atob(hash), value: content.byteOffset});
-  }
-
-  hashTable.sort((prev, next) => {
-    if (prev.key === next.key) {
-      return prev.value < next.value ? -1 : 1;
-    }
-    return prev.key < next.key ? -1 : 1;
-  });
-
-  const output = createWriteStream(outputFile);
-  return new Promise((resolve, reject) => {
-    output.on('close', function () {
-      console.log(`${outputFile} generated and saved`); // eslint-disable-line
-      resolve(null);
-    });
-    output.on('error', function (err) {
-      console.log(err); // eslint-disable-line
-      reject(err);
-    });
-    for (const key in hashTable) {
-      const item = hashTable[key];
-      const value = longToByteArray(item.value);
-      // TODO: perhaps you need to wait for the 'drain' event if the write returns 'false'
-      // eslint-disable-next-line no-undef
-      output.write(Buffer.from(crypt.hexToBytes(item.key).concat(value)));
-    }
-    output.close();
-  });
-}
-
-/**
- * Encode 64 bit value to byte array
- *
- * @param long - stringified number
- * @returns
- */
-function longToByteArray(long: string): number[] {
-  const buffer = new ArrayBuffer(8); // JS numbers are 8 bytes long, or 64 bits
-  const longNum = new Float64Array(buffer); // so equivalent to Float64
-  longNum[0] = parseInt(long);
-  return Array.from(new Uint8Array(buffer)).reverse(); // reverse to get little endian
-}
-
-/**
- * Add file to zip archive
- *
- * @param inputFolder
- * @param fileName
- * @param zipFile
- * @param sevenZipExe
- */
-export async function addFileToZip(
-  inputFolder: string,
-  fileName: string,
-  zipFile: string,
-  sevenZipExe: string
-) {
-  await compressWithChildProcess(inputFolder, zipFile, 0, fileName, sevenZipExe);
-  console.log(`${fileName} added to ${zipFile}.`); // eslint-disable-line
-}
-
-/**
- *
- * @param archive zip archive instance
- * @param subFileName file path inside archive
- * @param subFileData source file path
- * @returns
- */
-function appendFileToArchive(archive: any, subFileName: string, subFileData: string) {
-  return new Promise((resolve) => {
-    const fileStream = createReadStream(subFileData);
-    console.log(`Compression start: ${subFileName}`); // eslint-disable-line no-undef,no-console
-    fileStream.on('close', () => {
-      console.log(`Compression finish: ${subFileName}`); // eslint-disable-line no-undef,no-console
-      resolve(null);
-    });
-    archive.append(fileStream, {name: subFileName});
-  });
-}
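With this removal, `compress-util.ts` keeps only single-file gzip compression; the archiver/JSZip/7z-based SLPK packaging helpers above are gone. A usage sketch of the surviving function, based on the signature in the hunk header; that the promise resolves with the path of the generated `.gz` file is an assumption:

```ts
import {compressFileWithGzip} from './lib/utils/compress-util';

// Sketch: gzip one tile resource. Assumes the promise resolves with the
// compressed file's path (e.g. 'tile.bin' -> 'tile.bin.gz').
async function gzipResource(path: string): Promise<string> {
  return await compressFileWithGzip(path);
}
```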
package/src/lib/utils/conversion-dump.ts (new file)

@@ -0,0 +1,325 @@
+import {isDeepStrictEqual} from 'util';
+import {DUMP_FILE_SUFFIX} from '../../constants';
+import {isFileExists, openJson, removeFile, writeFile} from './file-utils';
+import {join} from 'path';
+import {BoundingVolumes, I3SMaterialDefinition, TextureSetDefinitionFormats} from '@loaders.gl/i3s';
+import {AttributeMetadataInfoObject} from '../../i3s-converter/helpers/attribute-metadata-info';
+
+export type ConversionDumpOptions = {
+  inputUrl: string;
+  outputPath: string;
+  tilesetName: string;
+  maxDepth: number;
+  slpk: boolean;
+  egmFilePath: string;
+  token: string;
+  draco: boolean;
+  mergeMaterials: boolean;
+  generateTextures: boolean;
+  generateBoundingVolumes: boolean;
+  metadataClass: string;
+  analyze: boolean;
+};
+
+type NodeDoneStatus = {
+  nodeId: number;
+  done: boolean;
+  progress?: Record<string, boolean>;
+  dumpMetadata?: DumpMetadata;
+};
+
+type TilesConverted = {
+  nodes: NodeDoneStatus[];
+};
+
+export type DumpMetadata = {
+  boundingVolumes: BoundingVolumes | null;
+  attributesCount?: number;
+  featureCount: number | null;
+  geometry: boolean;
+  hasUvRegions: boolean;
+  materialId: number | null;
+  texelCountHint?: number;
+  vertexCount: number | null;
+};
+
+export type TextureSetDefinition = {
+  formats: TextureSetDefinitionFormats;
+  atlas?: boolean;
+};
+
+export class ConversionDump {
+  /** Restored/resumed dump indicator */
+  restored: boolean = false;
+  /** Conversion options */
+  private options?: ConversionDumpOptions;
+  /** Tiles conversion progress status map */
+  tilesConverted: Record<string, TilesConverted>;
+  /** Textures formats definitions */
+  textureSetDefinitions?: TextureSetDefinition[];
+  /** Attributes Metadata */
+  attributeMetadataInfo?: AttributeMetadataInfoObject;
+  /** Array of materials definitions */
+  materialDefinitions: I3SMaterialDefinition[] = [];
+
+  constructor() {
+    this.tilesConverted = {};
+  }
+
+  /**
+   * Create a dump with conversion options
+   * @param currentOptions - converter options
+   */
+  async createDump(currentOptions: ConversionDumpOptions): Promise<void> {
+    const {
+      tilesetName,
+      slpk,
+      egmFilePath,
+      inputUrl,
+      outputPath,
+      draco = true,
+      maxDepth,
+      token,
+      generateTextures,
+      generateBoundingVolumes,
+      mergeMaterials = true,
+      metadataClass,
+      analyze = false
+    } = currentOptions;
+    this.options = {
+      tilesetName,
+      slpk,
+      egmFilePath,
+      inputUrl,
+      outputPath,
+      draco,
+      maxDepth,
+      token,
+      generateTextures,
+      generateBoundingVolumes,
+      mergeMaterials,
+      metadataClass,
+      analyze
+    };
+
+    const dumpFilename = join(
+      this.options.outputPath,
+      this.options.tilesetName,
+      `${this.options.tilesetName}${DUMP_FILE_SUFFIX}`
+    );
+    if (await isFileExists(dumpFilename)) {
+      const {
+        options,
+        tilesConverted,
+        textureSetDefinitions,
+        attributeMetadataInfo,
+        materialDefinitions
+      } = await openJson(
+        join(this.options.outputPath, this.options.tilesetName),
+        `${this.options.tilesetName}${DUMP_FILE_SUFFIX}`
+      );
+      if (isDeepStrictEqual(options, JSON.parse(JSON.stringify(this.options)))) {
+        this.tilesConverted = tilesConverted;
+        this.textureSetDefinitions = textureSetDefinitions;
+        this.attributeMetadataInfo = attributeMetadataInfo;
+        this.materialDefinitions = materialDefinitions;
+        this.restored = true;
+        return;
+      }
+    }
+    await this.deleteDumpFile();
+  }
+
+  /**
+   * Reset a dump
+   */
+  reset(): void {
+    this.restored = false;
+    this.tilesConverted = {};
+    if (this.textureSetDefinitions) {
+      delete this.textureSetDefinitions;
+    }
+    if (this.attributeMetadataInfo) {
+      delete this.attributeMetadataInfo;
+    }
+    if (this.materialDefinitions.length > 0) {
+      this.materialDefinitions = [];
+    }
+  }
+
+  /**
+   * Update conversion status in the dump file
+   */
+  private async updateDumpFile(): Promise<void> {
+    if (this.options?.outputPath && this.options.tilesetName) {
+      try {
+        await writeFile(
+          join(this.options.outputPath, this.options.tilesetName),
+          JSON.stringify({
+            options: this.options,
+            tilesConverted: this.tilesConverted,
+            textureSetDefinitions: this.textureSetDefinitions,
+            attributeMetadataInfo: this.attributeMetadataInfo,
+            materialDefinitions: this.materialDefinitions
+          }),
+          `${this.options.tilesetName}${DUMP_FILE_SUFFIX}`
+        );
+      } catch (error) {
+        console.log("Can't update dump file", error);
+      }
+    }
+  }
+
+  /**
+   * Delete a dump file
+   */
+  async deleteDumpFile(): Promise<void> {
+    if (
+      this.options?.outputPath &&
+      this.options.tilesetName &&
+      (await isFileExists(
+        join(
+          this.options.outputPath,
+          this.options.tilesetName,
+          `${this.options.tilesetName}${DUMP_FILE_SUFFIX}`
+        )
+      ))
+    ) {
+      await removeFile(
+        join(
+          this.options.outputPath,
+          this.options.tilesetName,
+          `${this.options.tilesetName}${DUMP_FILE_SUFFIX}`
+        )
+      );
+    }
+  }
+
+  /**
+   * Get record from the tilesConverted Map
+   * @param fileName - source filename
+   * @returns existing object from the tilesConverted Map
+   */
+  private getRecord(fileName: string) {
+    return this.tilesConverted[fileName];
+  }
+
+  /**
+   * Set a record for the dump file
+   * @param fileName - key - source filename
+   * @param object - value
+   */
+  private setRecord(fileName: string, object: any) {
+    this.tilesConverted[fileName] = object;
+  }
+
+  /**
+   * Add a node into the dump file for the source file record
+   * @param filename - source filename
+   * @param nodeId - nodeId of the node
+   */
+  async addNode(filename: string, nodeId: number, dumpMetadata: DumpMetadata) {
+    const {nodes} = this.getRecord(filename) || {nodes: []};
+    nodes.push({nodeId, done: false, progress: {}, dumpMetadata});
+    if (nodes.length === 1) {
+      this.setRecord(filename, {nodes});
+    }
+    await this.updateDumpFile();
+  }
+
+  /**
+   * Clear the dump record for the source filename
+   * @param filename - source filename
+   */
+  clearDumpRecord(filename: string) {
+    this.setRecord(filename, {nodes: []});
+  }
+
+  /**
+   * Add textures definitions into the dump file
+   * @param textureDefinitions - textures definitions array
+   */
+  addTexturesDefinitions(textureDefinitions: TextureSetDefinition[]) {
+    this.textureSetDefinitions = textureDefinitions;
+  }
+
+  /**
+   * Update done status object for the writing resources
+   * @param filename - key - source filename
+   * @param nodeId - nodeId for the source filename
+   * @param resourceType - resource type to update status
+   * @param value - done status value
+   */
+  updateDoneStatus(filename: string, nodeId: number, resourceType: string, value: boolean) {
+    const nodeDump = this.tilesConverted[filename]?.nodes.find(
+      (element) => element.nodeId === nodeId
+    );
+    if (nodeDump) {
+      if (!nodeDump.progress) {
+        nodeDump.progress = {};
+      }
+      nodeDump.progress[resourceType] = value;
+      if (!value) {
+        nodeDump.done = false;
+      }
+    }
+  }
+
+  /**
+   * Update dump file according to writing results
+   * @param changedRecords - array of parameters ids for the written resources
+   * @param writeResults - array of writing resource files results
+   */
+  async updateConvertedTilesDump(
+    changedRecords: {outputId?: number; sourceId?: string; resourceType?: string}[],
+    writeResults: PromiseSettledResult<string | null>[]
+  ) {
+    for (let i = 0; i < changedRecords.length; i++) {
+      if (changedRecords[i] && 'value' in writeResults[i]) {
+        const {sourceId, resourceType, outputId} = changedRecords[i];
+        if (!sourceId || !resourceType || !outputId) continue;
+        for (const node of this.tilesConverted[sourceId].nodes) {
+          if (node.nodeId === outputId && node.progress) {
+            node.progress[resourceType] = true;
+
+            let done = false;
+            for (const key in node.progress) {
+              done = node.progress[key];
+              if (!done) break;
+            }
+            node.done = done;
+            if (node.done) {
+              delete node.progress;
+            }
+            break;
+          }
+        }
+      }
+    }
+    await this.updateDumpFile();
+  }
+
+  /**
+   * Check if source file conversion is complete
+   * @param filename - source filename
+   * @returns true if source file conversion is complete
+   */
+  isFileConversionComplete(filename: string): boolean {
+    let result = true;
+    for (const node of this.tilesConverted[filename]?.nodes || []) {
+      if (!node.done) {
+        result = false;
+        break;
+      }
+    }
+    return result && this.tilesConverted[filename]?.nodes?.length > 0;
+  }
+
+  /**
+   * Set materialDefinitions into a dump
+   * @param materialDefinitions - Array of materialDefinitions
+   */
+  setMaterialsDefinitions(materialDefinitions: I3SMaterialDefinition[]): void {
+    this.materialDefinitions = materialDefinitions;
+  }
+}
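`conversion-dump.ts` is the core of the new resumable-conversion support: a dump file (its name built from the tileset name plus `DUMP_FILE_SUFFIX`, the one-line addition to `constants.ts`) records the converter options together with per-node write progress, and is restored only when the stored options deep-equal the current ones. A sketch of the intended flow, using only the API added in this file; all paths, ids, and option values are illustrative:

```ts
import {ConversionDump} from './lib/utils/conversion-dump';

async function runResumable(): Promise<void> {
  const dump = new ConversionDump();
  await dump.createDump({
    inputUrl: 'https://example.com/tileset.json', // illustrative source
    outputPath: './output',
    tilesetName: 'MyTileset',
    maxDepth: 5,
    slpk: true,
    egmFilePath: './deps/egm2008-5.pgm', // illustrative geoid file path
    token: '',
    draco: true,
    mergeMaterials: true,
    generateTextures: false,
    generateBoundingVolumes: false,
    metadataClass: '',
    analyze: false
  });

  if (dump.restored) {
    // A dump with deep-equal options exists: finished tiles can be skipped
    if (dump.isFileConversionComplete('tile-0.b3dm' /* hypothetical tile */)) {
      return;
    }
  }
  // ...convert, then remove the dump once the whole conversion succeeds
  await dump.deleteDumpFile();
}
```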
@@ -1,11 +1,15 @@
|
|
|
1
1
|
import {Queue} from './queue';
|
|
2
2
|
import process from 'process';
|
|
3
|
+
import {ConversionDump} from './conversion-dump';
|
|
3
4
|
|
|
4
5
|
/** Memory limit size is based on testing */
|
|
5
6
|
const MEMORY_LIMIT = 4 * 1024 * 1024 * 1024; // 4GB
|
|
6
7
|
|
|
7
8
|
export type WriteQueueItem = {
|
|
8
9
|
archiveKey?: string;
|
|
10
|
+
sourceId?: string;
|
|
11
|
+
outputId?: number;
|
|
12
|
+
resourceType?: string;
|
|
9
13
|
/**
|
|
10
14
|
* writePromise() returns a Promise that will be awaited in Promise.allSettled(promises);
|
|
11
15
|
* Arguments for this call are specified in writeQueue.enqueue call like this:
|
|
@@ -27,13 +31,19 @@ export type WriteQueueItem = {
|
|
|
27
31
|
|
|
28
32
|
export default class WriteQueue<T extends WriteQueueItem> extends Queue<T> {
|
|
29
33
|
private intervalId?: NodeJS.Timeout;
|
|
34
|
+
private conversionDump: ConversionDump;
|
|
30
35
|
public writePromise: Promise<void> | null = null;
|
|
31
36
|
public fileMap: {[key: string]: string} = {};
|
|
32
37
|
public listeningInterval: number;
|
|
33
38
|
public writeConcurrency: number;
|
|
34
39
|
|
|
35
|
-
constructor(
|
|
40
|
+
constructor(
|
|
41
|
+
conversionDump: ConversionDump,
|
|
42
|
+
listeningInterval: number = 2000,
|
|
43
|
+
writeConcurrency: number = 400
|
|
44
|
+
) {
|
|
36
45
|
super();
|
|
46
|
+
this.conversionDump = conversionDump;
|
|
37
47
|
this.listeningInterval = listeningInterval;
|
|
38
48
|
this.writeConcurrency = writeConcurrency;
|
|
39
49
|
}
|
|
@@ -81,18 +91,21 @@ export default class WriteQueue<T extends WriteQueueItem> extends Queue<T> {
|
|
|
81
91
|
while (this.length) {
|
|
82
92
|
const promises: Promise<string | null>[] = [];
|
|
83
93
|
const archiveKeys: (string | undefined)[] = [];
|
|
94
|
+
const changedRecords: {outputId?: number; sourceId?: string; resourceType?: string}[] = [];
|
|
84
95
|
for (let i = 0; i < this.writeConcurrency; i++) {
|
|
85
96
|
const item = this.dequeue();
|
|
86
97
|
if (!item) {
|
|
87
98
|
break;
|
|
88
99
|
}
|
|
89
|
-
const {archiveKey, writePromise} = item as WriteQueueItem;
|
|
100
|
+
const {archiveKey, sourceId, outputId, resourceType, writePromise} = item as WriteQueueItem;
|
|
90
101
|
archiveKeys.push(archiveKey);
|
|
102
|
+
changedRecords.push({sourceId, outputId, resourceType});
|
|
91
103
|
const promise = writePromise();
|
|
92
104
|
promises.push(promise);
|
|
93
105
|
}
|
|
94
106
|
const writeResults = await Promise.allSettled(promises);
|
|
95
107
|
this.updateFileMap(archiveKeys, writeResults);
|
|
108
|
+
await this.conversionDump.updateConvertedTilesDump(changedRecords, writeResults);
|
|
96
109
|
}
|
|
97
110
|
}
|
|
98
111
|
|