@loaders.gl/tile-converter 4.1.0-alpha.1 → 4.1.0-alpha.11

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51) hide show
  1. package/dist/3d-tiles-converter/helpers/b3dm-converter.d.ts.map +1 -1
  2. package/dist/3d-tiles-converter/helpers/b3dm-converter.js +8 -0
  3. package/dist/3d-tiles-converter/helpers/b3dm-converter.js.map +1 -1
  4. package/dist/constants.d.ts +1 -0
  5. package/dist/constants.d.ts.map +1 -1
  6. package/dist/constants.js +1 -0
  7. package/dist/constants.js.map +1 -1
  8. package/dist/converter-cli.js +41 -4
  9. package/dist/converter-cli.js.map +1 -1
  10. package/dist/converter.min.cjs +131 -220
  11. package/dist/deps-installer/deps-installer.d.ts.map +1 -1
  12. package/dist/deps-installer/deps-installer.js +4 -3
  13. package/dist/deps-installer/deps-installer.js.map +1 -1
  14. package/dist/i3s-converter/i3s-converter.d.ts +14 -0
  15. package/dist/i3s-converter/i3s-converter.d.ts.map +1 -1
  16. package/dist/i3s-converter/i3s-converter.js +73 -19
  17. package/dist/i3s-converter/i3s-converter.js.map +1 -1
  18. package/dist/i3s-converter/types.d.ts +7 -0
  19. package/dist/i3s-converter/types.d.ts.map +1 -1
  20. package/dist/i3s-converter/types.js +8 -0
  21. package/dist/i3s-converter/types.js.map +1 -1
  22. package/dist/i3s-server/bin/i3s-server.min.cjs +80 -80
  23. package/dist/index.cjs +365 -93
  24. package/dist/lib/utils/compress-util.d.ts +0 -37
  25. package/dist/lib/utils/compress-util.d.ts.map +1 -1
  26. package/dist/lib/utils/compress-util.js +1 -149
  27. package/dist/lib/utils/compress-util.js.map +1 -1
  28. package/dist/lib/utils/conversion-dump.d.ts +81 -0
  29. package/dist/lib/utils/conversion-dump.d.ts.map +1 -0
  30. package/dist/lib/utils/conversion-dump.js +131 -0
  31. package/dist/lib/utils/conversion-dump.js.map +1 -0
  32. package/dist/lib/utils/statistic-utills.d.ts +23 -6
  33. package/dist/lib/utils/write-queue.d.ts +6 -1
  34. package/dist/lib/utils/write-queue.d.ts.map +1 -1
  35. package/dist/lib/utils/write-queue.js +15 -3
  36. package/dist/lib/utils/write-queue.js.map +1 -1
  37. package/dist/pgm-loader.js +1 -1
  38. package/dist/pgm-loader.js.map +1 -1
  39. package/dist/slpk-extractor.min.cjs +46 -46
  40. package/package.json +16 -16
  41. package/src/3d-tiles-converter/helpers/b3dm-converter.ts +19 -0
  42. package/src/constants.ts +1 -0
  43. package/src/converter-cli.ts +58 -4
  44. package/src/deps-installer/deps-installer.ts +3 -2
  45. package/src/i3s-converter/i3s-converter.ts +191 -51
  46. package/src/i3s-converter/types.ts +8 -0
  47. package/src/lib/utils/compress-util.ts +1 -264
  48. package/src/lib/utils/conversion-dump.ts +203 -0
  49. package/src/lib/utils/write-queue.ts +15 -2
  50. package/dist/lib/utils/statistic-utills.d.js +0 -2
  51. package/dist/lib/utils/statistic-utills.d.js.map +0 -1
@@ -1,13 +1,5 @@
1
1
  import {createGzip} from 'zlib';
2
- import {join} from 'path';
3
- import {promises as fs, createReadStream, createWriteStream} from 'fs';
4
- import archiver from 'archiver';
5
- import {removeFile} from './file-utils';
6
- import {ChildProcessProxy} from '@loaders.gl/worker-utils';
7
- import JSZip from 'jszip';
8
- import {MD5Hash} from '@loaders.gl/crypto';
9
- import crypt from 'crypt';
10
- import {getAbsoluteFilePath} from './file-utils';
2
+ import {createReadStream, createWriteStream} from 'fs';
11
3
 
12
4
  /**
13
5
  * Compress file to gzip file
@@ -33,258 +25,3 @@ export function compressFileWithGzip(pathFile: string): Promise<string> {
33
25
  input.pipe(gzip).pipe(output);
34
26
  });
35
27
  }
36
-
37
- /**
38
- * Compress files from map into slpk file
39
- *
40
- * @param fileMap - map with file paths (key: output path, value: input path)
41
- * @param outputFile - output slpk file
42
- * @param level - compression level
43
- */
44
- export async function compressFilesWithZip(
45
- fileMap: {[key: string]: string},
46
- outputFile: string,
47
- level: number = 0
48
- ) {
49
- // Before creating a new file, we need to delete the old file
50
- try {
51
- await removeFile(outputFile);
52
- } catch (e) {
53
- // Do nothing if old file doesn't exist
54
- }
55
-
56
- const output = createWriteStream(outputFile);
57
- const archive = archiver('zip', {
58
- zlib: {level} // Sets the compression level.
59
- });
60
-
61
- return new Promise(async (resolve, reject) => {
62
- // listen for all archive data to be written
63
- // 'close' event is fired only when a file descriptor is involved
64
- output.on('close', function () {
65
- console.log(`${outputFile} saved.`); // eslint-disable-line no-undef,no-console
66
- console.log(`${archive.pointer()} total bytes`); // eslint-disable-line no-undef,no-console
67
- resolve(null);
68
- });
69
-
70
- // This event is fired when the data source is drained no matter what was the data source.
71
- // It is not part of this library but rather from the NodeJS Stream API.
72
- // @see: https://nodejs.org/api/stream.html#stream_event_end
73
- output.on('end', function () {
74
- console.log('Data has been drained'); // eslint-disable-line no-undef,no-console
75
- resolve(null);
76
- });
77
-
78
- // good practice to catch warnings (ie stat failures and other non-blocking errors)
79
- archive.on('warning', function (err) {
80
- console.log(err); // eslint-disable-line no-undef,no-console
81
- reject(err);
82
- });
83
-
84
- // good practice to catch this error explicitly
85
- archive.on('error', function (err) {
86
- reject(err);
87
- });
88
-
89
- // pipe archive data to the file
90
- archive.pipe(output);
91
-
92
- for (const subFileName in fileMap) {
93
- const subFileData = fileMap[subFileName];
94
- await appendFileToArchive(archive, subFileName, subFileData);
95
- }
96
-
97
- // finalize the archive (ie we are done appending files but streams have to finish yet)
98
- archive.finalize();
99
- });
100
- }
101
-
102
- /**
103
- * Compress files using external tool 'zip'/'7z'
104
- *
105
- * @param inputFolder - folder to archive - for cwd option
106
- * @param outputFile - output slpk file
107
- * @param level - compression level
108
- * @param inputFiles - input files path to pass to the executable as option
109
- * @param sevenZipExe - path to 7z.exe executable
110
- */
111
- export async function compressWithChildProcess(
112
- inputFolder: string,
113
- outputFile: string,
114
- level: number,
115
- inputFiles: string,
116
- sevenZipExe: string
117
- ) {
118
- // eslint-disable-next-line no-undef
119
- if (process.platform === 'win32') {
120
- await compressWithChildProcessWindows(inputFolder, outputFile, level, inputFiles, sevenZipExe);
121
- } else {
122
- await compressWithChildProcessUnix(inputFolder, outputFile, level, inputFiles);
123
- }
124
- }
125
-
126
- /**
127
- * Compress files using external linux tool 'zip'
128
- *
129
- * @param inputFolder - folder to archive - for cwd option
130
- * @param outputFile - output slpk file
131
- * @param level - compression level
132
- * @param inputFiles - input files path to pass to the executable as option
133
- */
134
- async function compressWithChildProcessUnix(
135
- inputFolder: string,
136
- outputFile: string,
137
- level: number = 0,
138
- inputFiles: string = '.'
139
- ) {
140
- const fullOutputFile = getAbsoluteFilePath(outputFile);
141
- const args = [`-${level}`, '-r', fullOutputFile, inputFiles];
142
- const childProcess = new ChildProcessProxy();
143
- await childProcess.start({
144
- command: 'zip',
145
- arguments: args,
146
- spawn: {
147
- cwd: inputFolder
148
- },
149
- wait: 0
150
- });
151
- }
152
-
153
- /**
154
- * Compress files using windows external tool '7z'
155
- *
156
- * @param inputFolder - folder to archive - for cwd option
157
- * @param outputFile - output slpk file
158
- * @param level - compression level
159
- * @param inputFiles - input files path to pass to the executable as option
160
- * @param sevenZipExe - path to 7z.exe executable
161
- */
162
- async function compressWithChildProcessWindows(
163
- inputFolder: string,
164
- outputFile: string,
165
- level: number = 0,
166
- inputFiles: string = join('.', '*'),
167
- sevenZipExe: string
168
- ) {
169
- // Workaround for @listfile issue. In 7z.exe @-leading files are handled as listfiles
170
- // https://sevenzip.osdn.jp/chm/cmdline/syntax.htm
171
- if (inputFiles[0] === '@') {
172
- inputFiles = `*${inputFiles.substr(1)}`;
173
- }
174
-
175
- const fullOutputFile = getAbsoluteFilePath(outputFile);
176
- const args = ['a', '-tzip', `-mx=${level}`, fullOutputFile, inputFiles];
177
- const childProcess = new ChildProcessProxy();
178
- await childProcess.start({
179
- command: sevenZipExe,
180
- arguments: args,
181
- spawn: {
182
- cwd: `${inputFolder}`
183
- },
184
- wait: 0
185
- });
186
- }
187
-
188
- /**
189
- * Generate hash file from zip archive
190
- * https://github.com/Esri/i3s-spec/blob/master/docs/1.7/slpk_hashtable.cmn.md
191
- *
192
- * @param inputZipFile
193
- * @param outputFile
194
- */
195
- export async function generateHash128FromZip(inputZipFile: string, outputFile: string) {
196
- const input = await fs.readFile(inputZipFile);
197
- const zip = await JSZip.loadAsync(input);
198
- const hashTable: {key: string; value: string}[] = [];
199
- const zipFiles = zip.files;
200
- for (const relativePath in zipFiles) {
201
- const zipEntry = zipFiles[relativePath];
202
- // Had to use a workaround because the correct string is getting the wrong data
203
- // const content = await zipEntry.async('nodebuffer');
204
- // _data isn't described in the interface, so lint thought it was wrong
205
- const _data = '_data';
206
- const content = zipEntry[_data].compressedContent;
207
- if (zipEntry.dir) continue; // eslint-disable-line no-continue
208
- // eslint-disable-next-line no-undef
209
- const hash = await new MD5Hash().hash(Buffer.from(relativePath.toLowerCase()), 'base64');
210
- // eslint-disable-next-line no-undef
211
- hashTable.push({key: atob(hash), value: content.byteOffset});
212
- }
213
-
214
- hashTable.sort((prev, next) => {
215
- if (prev.key === next.key) {
216
- return prev.value < next.value ? -1 : 1;
217
- }
218
- return prev.key < next.key ? -1 : 1;
219
- });
220
-
221
- const output = createWriteStream(outputFile);
222
- return new Promise((resolve, reject) => {
223
- output.on('close', function () {
224
- console.log(`${outputFile} generated and saved`); // eslint-disable-line
225
- resolve(null);
226
- });
227
- output.on('error', function (err) {
228
- console.log(err); // eslint-disable-line
229
- reject(err);
230
- });
231
- for (const key in hashTable) {
232
- const item = hashTable[key];
233
- const value = longToByteArray(item.value);
234
- // TODO: perhaps you need to wait for the 'drain' event if the write returns 'false'
235
- // eslint-disable-next-line no-undef
236
- output.write(Buffer.from(crypt.hexToBytes(item.key).concat(value)));
237
- }
238
- output.close();
239
- });
240
- }
241
-
242
- /**
243
- * Encode 64 bit value to byte array
244
- *
245
- * @param long - stringified number
246
- * @returns
247
- */
248
- function longToByteArray(long: string): number[] {
249
- const buffer = new ArrayBuffer(8); // JS numbers are 8 bytes long, or 64 bits
250
- const longNum = new Float64Array(buffer); // so equivalent to Float64
251
- longNum[0] = parseInt(long);
252
- return Array.from(new Uint8Array(buffer)).reverse(); // reverse to get little endian
253
- }
254
-
255
- /**
256
- * Add file to zip archive
257
- *
258
- * @param inputFile
259
- * @param fileName
260
- * @param zipFile
261
- * @param sevenZipExe
262
- */
263
- export async function addFileToZip(
264
- inputFolder: string,
265
- fileName: string,
266
- zipFile: string,
267
- sevenZipExe: string
268
- ) {
269
- await compressWithChildProcess(inputFolder, zipFile, 0, fileName, sevenZipExe);
270
- console.log(`${fileName} added to ${zipFile}.`); // eslint-disable-line
271
- }
272
-
273
- /**
274
- *
275
- * @param archive zip archive instance
276
- * @param subFileName file path inside archive
277
- * @param subFileData source file path
278
- * @returns
279
- */
280
- function appendFileToArchive(archive: any, subFileName: string, subFileData: string) {
281
- return new Promise((resolve) => {
282
- const fileStream = createReadStream(subFileData);
283
- console.log(`Compression start: ${subFileName}`); // eslint-disable-line no-undef,no-console
284
- fileStream.on('close', () => {
285
- console.log(`Compression finish: ${subFileName}`); // eslint-disable-line no-undef,no-console
286
- resolve(null);
287
- });
288
- archive.append(fileStream, {name: subFileName});
289
- });
290
- }
@@ -0,0 +1,203 @@
1
+ import {DUMP_FILE_SUFFIX} from '../../constants';
2
+ import {removeFile, writeFile} from './file-utils';
3
+ import {join} from 'path';
4
+
5
+ export type ConversionDumpOptions = {
6
+ inputUrl: string;
7
+ outputPath: string;
8
+ tilesetName: string;
9
+ maxDepth: number;
10
+ slpk: boolean;
11
+ egmFilePath: string;
12
+ token: string;
13
+ draco: boolean;
14
+ mergeMaterials: boolean;
15
+ generateTextures: boolean;
16
+ generateBoundingVolumes: boolean;
17
+ metadataClass: string;
18
+ analyze: boolean;
19
+ };
20
+
21
+ type NodeDoneStatus = {
22
+ nodeId: number;
23
+ done: boolean;
24
+ progress: Record<string, boolean>;
25
+ };
26
+
27
+ type TilesConverted = {
28
+ nodes: NodeDoneStatus[];
29
+ };
30
+
31
+ export class ConversionDump {
32
+ /** Conversion options */
33
+ private options?: ConversionDumpOptions;
34
+ /** Tiles conversion progress status map */
35
+ tilesConverted: Record<string, TilesConverted>;
36
+
37
+ constructor() {
38
+ this.tilesConverted = {};
39
+ }
40
+
41
+ /**
42
+ * Create a dump file with conversion options
43
+ * @param options - converter options
44
+ */
45
+ async createDumpFile(options: ConversionDumpOptions): Promise<void> {
46
+ const {
47
+ tilesetName,
48
+ slpk,
49
+ egmFilePath,
50
+ inputUrl,
51
+ outputPath,
52
+ draco = true,
53
+ maxDepth,
54
+ token,
55
+ generateTextures,
56
+ generateBoundingVolumes,
57
+ mergeMaterials = true,
58
+ metadataClass,
59
+ analyze = false
60
+ } = options;
61
+ this.options = {
62
+ tilesetName,
63
+ slpk,
64
+ egmFilePath,
65
+ inputUrl,
66
+ outputPath,
67
+ draco,
68
+ maxDepth,
69
+ token,
70
+ generateTextures,
71
+ generateBoundingVolumes,
72
+ mergeMaterials,
73
+ metadataClass,
74
+ analyze
75
+ };
76
+
77
+ try {
78
+ await writeFile(
79
+ options.outputPath,
80
+ JSON.stringify({options: this.options}),
81
+ `${options.tilesetName}${DUMP_FILE_SUFFIX}`
82
+ );
83
+ } catch (error) {
84
+ console.log("Can't create dump file", error);
85
+ }
86
+ }
87
+
88
+ /**
89
+ * Update conversion status in the dump file
90
+ */
91
+ private async updateDumpFile(): Promise<void> {
92
+ if (this.options?.outputPath && this.options.tilesetName) {
93
+ try {
94
+ await writeFile(
95
+ this.options.outputPath,
96
+ JSON.stringify({
97
+ options: this.options,
98
+ tilesConverted: this.tilesConverted
99
+ }),
100
+ `${this.options.tilesetName}${DUMP_FILE_SUFFIX}`
101
+ );
102
+ } catch (error) {
103
+ console.log("Can't update dump file", error);
104
+ }
105
+ }
106
+ }
107
+
108
+ /**
109
+ * Delete a dump file
110
+ */
111
+ async deleteDumpFile(): Promise<void> {
112
+ if (this.options?.outputPath && this.options.tilesetName) {
113
+ await removeFile(
114
+ join(this.options.outputPath, `${this.options.tilesetName}${DUMP_FILE_SUFFIX}`)
115
+ );
116
+ }
117
+ }
118
+
119
+ /**
120
+ * Get record from the tilesConverted Map
121
+ * @param fileName - source filename
122
+ * @returns existing object from the tilesConverted Map
123
+ */
124
+ private getRecord(fileName: string) {
125
+ return this.tilesConverted[fileName];
126
+ }
127
+
128
+ /**
129
+ * Set a record for the dump file
130
+ * @param fileName - key - source filename
131
+ * @param object - value
132
+ */
133
+ private setRecord(fileName: string, object: any) {
134
+ this.tilesConverted[fileName] = object;
135
+ }
136
+
137
+ /**
138
+ * Add a node into the dump file for the source file record
139
+ * @param fileName - source filename
140
+ * @param nodeId - nodeId of the node
141
+ */
142
+ async addNode(filename: string, nodeId: number) {
143
+ const {nodes} = this.getRecord(filename) || {nodes: []};
144
+ nodes.push({nodeId, done: false, progress: {}});
145
+ if (nodes.length === 1) {
146
+ this.setRecord(filename, {nodes});
147
+ }
148
+ await this.updateDumpFile();
149
+ }
150
+
151
+ /**
152
+ * Update done status object for the writing resources
153
+ * @param fileName - key - source filename
154
+ * @param nodeId - nodeId for the source filename
155
+ * @param resourceType - resource type to update status
156
+ * @param value - value
157
+ */
158
+ updateDoneStatus(filename: string, nodeId: number, resourceType: string, value: boolean) {
159
+ const nodeDump = this.tilesConverted[filename]?.nodes.find(
160
+ (element) => element.nodeId === nodeId
161
+ );
162
+ if (nodeDump) {
163
+ nodeDump.progress[resourceType] = value;
164
+ if (!value) {
165
+ nodeDump.done = false;
166
+ }
167
+ }
168
+ }
169
+
170
+ /**
171
+ * Update dump file according to writing results
172
+ * @param changedRecords - array of parameters ids for the written resources
173
+ * @param writeResults - array of writing resource files results
174
+ */
175
+ async updateConvertedTilesDump(
176
+ changedRecords: {outputId?: number; sourceId?: string; resourceType?: string}[],
177
+ writeResults: PromiseSettledResult<string | null>[]
178
+ ) {
179
+ for (let i = 0; i < changedRecords.length; i++) {
180
+ if (changedRecords[i] && 'value' in writeResults[i]) {
181
+ const {sourceId, resourceType, outputId} = changedRecords[i];
182
+ if (!sourceId || !resourceType || !outputId) continue;
183
+ for (const node of this.tilesConverted[sourceId].nodes) {
184
+ if (node.nodeId === outputId) {
185
+ node.progress[resourceType] = true;
186
+
187
+ let done = false;
188
+ for (const key in node.progress) {
189
+ done = node.progress[key];
190
+ if (!done) break;
191
+ }
192
+ node.done = done;
193
+ if (node.done) {
194
+ node.progress = {};
195
+ }
196
+ break;
197
+ }
198
+ }
199
+ }
200
+ }
201
+ await this.updateDumpFile();
202
+ }
203
+ }
@@ -1,11 +1,15 @@
1
1
  import {Queue} from './queue';
2
2
  import process from 'process';
3
+ import {ConversionDump} from './conversion-dump';
3
4
 
4
5
  /** Memory limit size is based on testing */
5
6
  const MEMORY_LIMIT = 4 * 1024 * 1024 * 1024; // 4GB
6
7
 
7
8
  export type WriteQueueItem = {
8
9
  archiveKey?: string;
10
+ sourceId?: string;
11
+ outputId?: number;
12
+ resourceType?: string;
9
13
  /**
10
14
  * writePromise() returns a Promise that will be awaited in Promise.allSettled(promises);
11
15
  * Arguments for this call are specified in writeQueue.enqueue call like this:
@@ -27,13 +31,19 @@ export type WriteQueueItem = {
27
31
 
28
32
  export default class WriteQueue<T extends WriteQueueItem> extends Queue<T> {
29
33
  private intervalId?: NodeJS.Timeout;
34
+ private conversionDump: ConversionDump;
30
35
  public writePromise: Promise<void> | null = null;
31
36
  public fileMap: {[key: string]: string} = {};
32
37
  public listeningInterval: number;
33
38
  public writeConcurrency: number;
34
39
 
35
- constructor(listeningInterval: number = 2000, writeConcurrency: number = 400) {
40
+ constructor(
41
+ conversionDump: ConversionDump,
42
+ listeningInterval: number = 2000,
43
+ writeConcurrency: number = 400
44
+ ) {
36
45
  super();
46
+ this.conversionDump = conversionDump;
37
47
  this.listeningInterval = listeningInterval;
38
48
  this.writeConcurrency = writeConcurrency;
39
49
  }
@@ -81,18 +91,21 @@ export default class WriteQueue<T extends WriteQueueItem> extends Queue<T> {
81
91
  while (this.length) {
82
92
  const promises: Promise<string | null>[] = [];
83
93
  const archiveKeys: (string | undefined)[] = [];
94
+ const changedRecords: {outputId?: number; sourceId?: string; resourceType?: string}[] = [];
84
95
  for (let i = 0; i < this.writeConcurrency; i++) {
85
96
  const item = this.dequeue();
86
97
  if (!item) {
87
98
  break;
88
99
  }
89
- const {archiveKey, writePromise} = item as WriteQueueItem;
100
+ const {archiveKey, sourceId, outputId, resourceType, writePromise} = item as WriteQueueItem;
90
101
  archiveKeys.push(archiveKey);
102
+ changedRecords.push({sourceId, outputId, resourceType});
91
103
  const promise = writePromise();
92
104
  promises.push(promise);
93
105
  }
94
106
  const writeResults = await Promise.allSettled(promises);
95
107
  this.updateFileMap(archiveKeys, writeResults);
108
+ await this.conversionDump.updateConvertedTilesDump(changedRecords, writeResults);
96
109
  }
97
110
  }
98
111
 
@@ -1,2 +0,0 @@
1
-
2
- //# sourceMappingURL=statistic-utills.d.js.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"statistic-utills.d.js","names":[],"sources":["../../../src/lib/utils/statistic-utills.d.ts"],"sourcesContent":["/**\n * Do milliseconds time conversion to readable time string.\n * @param tile - 3d-tiles tile Object\n * @param coordinates - node converted coordinates\n * @returns String which characterizes conversion time period\n */\nexport function timeConverter(time: [number, number]): String;\n\n/**\n * Calculate files sizes after conversion.\n * @param params - Object with params of conversion.\n * @returns Promise with generated files size in bytes.\n */\nexport function calculateFilesSize(params: {\n slpk: boolean;\n outputPath: string;\n tilesetName: string;\n}): Number;\n\n/**\n * Reqursivelly calculate files sizes in directory.\n * @param dirPath - Directory path.\n * @returns Promise with files size in directory.\n */\nexport function getTotalFilesSize(dirPath: string): Number;\n"],"mappings":""}