@loaders.gl/tile-converter 4.1.0-alpha.1 → 4.1.0-alpha.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/constants.d.ts +1 -0
- package/dist/constants.d.ts.map +1 -1
- package/dist/constants.js +1 -0
- package/dist/constants.js.map +1 -1
- package/dist/converter-cli.js +41 -4
- package/dist/converter-cli.js.map +1 -1
- package/dist/converter.min.cjs +110 -110
- package/dist/deps-installer/deps-installer.d.ts.map +1 -1
- package/dist/deps-installer/deps-installer.js +4 -3
- package/dist/deps-installer/deps-installer.js.map +1 -1
- package/dist/i3s-converter/i3s-converter.d.ts +14 -0
- package/dist/i3s-converter/i3s-converter.d.ts.map +1 -1
- package/dist/i3s-converter/i3s-converter.js +71 -17
- package/dist/i3s-converter/i3s-converter.js.map +1 -1
- package/dist/i3s-converter/types.d.ts +7 -0
- package/dist/i3s-converter/types.d.ts.map +1 -1
- package/dist/i3s-converter/types.js +8 -0
- package/dist/i3s-converter/types.js.map +1 -1
- package/dist/i3s-server/bin/i3s-server.min.cjs +72 -72
- package/dist/index.cjs +347 -38
- package/dist/lib/utils/conversion-dump.d.ts +80 -0
- package/dist/lib/utils/conversion-dump.d.ts.map +1 -0
- package/dist/lib/utils/conversion-dump.js +127 -0
- package/dist/lib/utils/conversion-dump.js.map +1 -0
- package/dist/lib/utils/statistic-utills.d.ts +23 -6
- package/dist/lib/utils/write-queue.d.ts +6 -1
- package/dist/lib/utils/write-queue.d.ts.map +1 -1
- package/dist/lib/utils/write-queue.js +15 -3
- package/dist/lib/utils/write-queue.js.map +1 -1
- package/dist/pgm-loader.js +1 -1
- package/dist/pgm-loader.js.map +1 -1
- package/dist/slpk-extractor.min.cjs +46 -46
- package/package.json +16 -16
- package/src/constants.ts +1 -0
- package/src/converter-cli.ts +58 -4
- package/src/deps-installer/deps-installer.ts +3 -2
- package/src/i3s-converter/i3s-converter.ts +189 -21
- package/src/i3s-converter/types.ts +8 -0
- package/src/lib/utils/conversion-dump.ts +198 -0
- package/src/lib/utils/write-queue.ts +15 -2
- package/dist/lib/utils/statistic-utills.d.js +0 -2
- package/dist/lib/utils/statistic-utills.d.js.map +0 -1
package/dist/lib/utils/conversion-dump.d.ts
ADDED
@@ -0,0 +1,80 @@
+export type ConversionDumpOptions = {
+    inputUrl: string;
+    outputPath: string;
+    tilesetName: string;
+    maxDepth: number;
+    slpk: boolean;
+    egmFilePath: string;
+    token: string;
+    draco: boolean;
+    mergeMaterials: boolean;
+    generateTextures: boolean;
+    generateBoundingVolumes: boolean;
+    metadataClass: string;
+    analyze: boolean;
+};
+type NodeDoneStatus = {
+    nodeId: number;
+    done: Record<string, boolean> | boolean;
+};
+type TilesConverted = {
+    nodes: NodeDoneStatus[];
+};
+export declare class ConversionDump {
+    /** Conversion options */
+    private options?;
+    /** Tiles conversion progress status map */
+    tilesConverted: Record<string, TilesConverted>;
+    constructor();
+    /**
+     * Create a dump file with convertion options
+     * @param options - converter options
+     */
+    createDumpFile(options: ConversionDumpOptions): Promise<void>;
+    /**
+     * Update conversion status in the dump file
+     */
+    private updateDumpFile;
+    /**
+     * Delete a dump file
+     */
+    deleteDumpFile(): Promise<void>;
+    /**
+     * Get record from the tilesConverted Map
+     * @param fileName - source filename
+     * @returns existing object from the tilesConverted Map
+     */
+    private getRecord;
+    /**
+     * Set a record for the dump file
+     * @param fileName - key - source filename
+     * @param object - value
+     */
+    private setRecord;
+    /**
+     * Add a node into the dump file for the source file record
+     * @param fileName - source filename
+     * @param nodeId - nodeId of the node
+     */
+    addNode(filename: string, nodeId: number): Promise<void>;
+    /**
+     * Update done status object for the writing resources
+     * @param fileName - key - source filename
+     * @param nodeId - nodeId for the source filename
+     * @param resourceType - resource type to update status
+     * @param value - value
+     */
+    updateDoneStatus(filename: string, nodeId: number, resourceType: string, value: boolean): void;
+    /**
+     * Update dump file according to writing results
+     * @param changedRecords - array of parameters ids for the written resources
+     * @param writeResults - array of writing resource files results
+     */
+    updateConvertedTilesDump(changedRecords: {
+        outputId?: number;
+        sourceId?: string;
+        resourceType?: string;
+    }[], writeResults: PromiseSettledResult<string | null>[]): Promise<void>;
+}
+export {};
+//# sourceMappingURL=conversion-dump.d.ts.map
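The new `ConversionDump` class backs the converter's conversion-progress bookkeeping. A minimal sketch of how the API declared above could be driven, assuming a relative import of the module and placeholder option values (the filenames, node ids, and call sequence are illustrative, not the converter's actual flow):

```typescript
import {ConversionDump, ConversionDumpOptions} from './lib/utils/conversion-dump';

// Placeholder options; every field mirrors ConversionDumpOptions above.
const options: ConversionDumpOptions = {
  inputUrl: 'https://example.com/tileset.json',
  outputPath: './output',
  tilesetName: 'my-tileset',
  maxDepth: 4,
  slpk: true,
  egmFilePath: './deps/egm2008-5.pgm',
  token: '',
  draco: true,
  mergeMaterials: true,
  generateTextures: false,
  generateBoundingVolumes: false,
  metadataClass: '',
  analyze: false
};

async function trackConversion(): Promise<void> {
  const dump = new ConversionDump();
  // Writes `${tilesetName}${DUMP_FILE_SUFFIX}` under outputPath with the options snapshot.
  await dump.createDumpFile(options);
  // Register a node for a (hypothetical) source tile, then flip one resource's done flag in memory.
  await dump.addNode('tile-0.b3dm', 1);
  dump.updateDoneStatus('tile-0.b3dm', 1, 'geometry', true);
  // Once the conversion finishes cleanly, the dump file is removed.
  await dump.deleteDumpFile();
}
```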
package/dist/lib/utils/conversion-dump.d.ts.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"file":"conversion-dump.d.ts","sourceRoot":"","sources":["../../../src/lib/utils/conversion-dump.ts"],"names":[],"mappings":"AAIA,MAAM,MAAM,qBAAqB,GAAG;IAClC,QAAQ,EAAE,MAAM,CAAC;IACjB,UAAU,EAAE,MAAM,CAAC;IACnB,WAAW,EAAE,MAAM,CAAC;IACpB,QAAQ,EAAE,MAAM,CAAC;IACjB,IAAI,EAAE,OAAO,CAAC;IACd,WAAW,EAAE,MAAM,CAAC;IACpB,KAAK,EAAE,MAAM,CAAC;IACd,KAAK,EAAE,OAAO,CAAC;IACf,cAAc,EAAE,OAAO,CAAC;IACxB,gBAAgB,EAAE,OAAO,CAAC;IAC1B,uBAAuB,EAAE,OAAO,CAAC;IACjC,aAAa,EAAE,MAAM,CAAC;IACtB,OAAO,EAAE,OAAO,CAAC;CAClB,CAAC;AAEF,KAAK,cAAc,GAAG;IACpB,MAAM,EAAE,MAAM,CAAC;IACf,IAAI,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,GAAG,OAAO,CAAC;CACzC,CAAC;AAEF,KAAK,cAAc,GAAG;IACpB,KAAK,EAAE,cAAc,EAAE,CAAC;CACzB,CAAC;AAEF,qBAAa,cAAc;IACzB,yBAAyB;IACzB,OAAO,CAAC,OAAO,CAAC,CAAwB;IACxC,2CAA2C;IAC3C,cAAc,EAAE,MAAM,CAAC,MAAM,EAAE,cAAc,CAAC,CAAC;;IAM/C;;;OAGG;IACG,cAAc,CAAC,OAAO,EAAE,qBAAqB,GAAG,OAAO,CAAC,IAAI,CAAC;IA2CnE;;OAEG;YACW,cAAc;IAiB5B;;OAEG;IACG,cAAc,IAAI,OAAO,CAAC,IAAI,CAAC;IAQrC;;;;OAIG;IACH,OAAO,CAAC,SAAS;IAIjB;;;;OAIG;IACH,OAAO,CAAC,SAAS;IAIjB;;;;OAIG;IACG,OAAO,CAAC,QAAQ,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM;IAS9C;;;;;;OAMG;IACH,gBAAgB,CAAC,QAAQ,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,YAAY,EAAE,MAAM,EAAE,KAAK,EAAE,OAAO;IASvF;;;;OAIG;IACG,wBAAwB,CAC5B,cAAc,EAAE;QAAC,QAAQ,CAAC,EAAE,MAAM,CAAC;QAAC,QAAQ,CAAC,EAAE,MAAM,CAAC;QAAC,YAAY,CAAC,EAAE,MAAM,CAAA;KAAC,EAAE,EAC/E,YAAY,EAAE,oBAAoB,CAAC,MAAM,GAAG,IAAI,CAAC,EAAE;CAyBtD"}

package/dist/lib/utils/conversion-dump.js
ADDED
@@ -0,0 +1,127 @@
+import { DUMP_FILE_SUFFIX } from "../../constants.js";
+import { removeFile, writeFile } from "./file-utils.js";
+import { join } from 'path';
+export class ConversionDump {
+  constructor() {
+    this.options = void 0;
+    this.tilesConverted = void 0;
+    this.tilesConverted = {};
+  }
+  async createDumpFile(options) {
+    const {
+      tilesetName,
+      slpk,
+      egmFilePath,
+      inputUrl,
+      outputPath,
+      draco = true,
+      maxDepth,
+      token,
+      generateTextures,
+      generateBoundingVolumes,
+      mergeMaterials = true,
+      metadataClass,
+      analyze = false
+    } = options;
+    this.options = {
+      tilesetName,
+      slpk,
+      egmFilePath,
+      inputUrl,
+      outputPath,
+      draco,
+      maxDepth,
+      token,
+      generateTextures,
+      generateBoundingVolumes,
+      mergeMaterials,
+      metadataClass,
+      analyze
+    };
+    try {
+      await writeFile(options.outputPath, JSON.stringify({
+        options: this.options
+      }), `${options.tilesetName}${DUMP_FILE_SUFFIX}`);
+    } catch (error) {
+      console.log("Can't create dump file", error);
+    }
+  }
+  async updateDumpFile() {
+    var _this$options;
+    if ((_this$options = this.options) !== null && _this$options !== void 0 && _this$options.outputPath && this.options.tilesetName) {
+      try {
+        await writeFile(this.options.outputPath, JSON.stringify({
+          options: this.options,
+          tilesConverted: this.tilesConverted
+        }), `${this.options.tilesetName}${DUMP_FILE_SUFFIX}`);
+      } catch (error) {
+        console.log("Can't update dump file", error);
+      }
+    }
+  }
+  async deleteDumpFile() {
+    var _this$options2;
+    if ((_this$options2 = this.options) !== null && _this$options2 !== void 0 && _this$options2.outputPath && this.options.tilesetName) {
+      await removeFile(join(this.options.outputPath, `${this.options.tilesetName}${DUMP_FILE_SUFFIX}`));
+    }
+  }
+  getRecord(fileName) {
+    return this.tilesConverted[fileName];
+  }
+  setRecord(fileName, object) {
+    this.tilesConverted[fileName] = object;
+  }
+  async addNode(filename, nodeId) {
+    const {
+      nodes
+    } = this.getRecord(filename) || {
+      nodes: []
+    };
+    nodes.push({
+      nodeId,
+      done: {}
+    });
+    if (nodes.length === 1) {
+      this.setRecord(filename, {
+        nodes
+      });
+    }
+    await this.updateDumpFile();
+  }
+  updateDoneStatus(filename, nodeId, resourceType, value) {
+    var _this$tilesConverted$;
+    const nodeDump = (_this$tilesConverted$ = this.tilesConverted[filename]) === null || _this$tilesConverted$ === void 0 ? void 0 : _this$tilesConverted$.nodes.find(element => element.nodeId === nodeId);
+    if (nodeDump) {
+      nodeDump.done[resourceType] = value;
+    }
+  }
+  async updateConvertedTilesDump(changedRecords, writeResults) {
+    for (let i = 0; i < changedRecords.length; i++) {
+      if (changedRecords[i] && 'value' in writeResults[i]) {
+        const {
+          sourceId,
+          resourceType,
+          outputId
+        } = changedRecords[i];
+        if (!sourceId || !resourceType || !outputId) continue;
+        for (const node of this.tilesConverted[sourceId].nodes) {
+          if (typeof node.done !== 'boolean' && node.nodeId === outputId) {
+            node.done[resourceType] = true;
+          }
+          if (typeof node.done !== 'boolean') {
+            let done = false;
+            for (const key in node.done) {
+              done = node.done[key];
+              if (!done) break;
+            }
+            if (done) {
+              node.done = true;
+            }
+          }
+        }
+      }
+    }
+    await this.updateDumpFile();
+  }
+}
+//# sourceMappingURL=conversion-dump.js.map
package/dist/lib/utils/conversion-dump.js.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"file":"conversion-dump.js","names":["DUMP_FILE_SUFFIX","removeFile","writeFile","join","ConversionDump","constructor","options","tilesConverted","createDumpFile","tilesetName","slpk","egmFilePath","inputUrl","outputPath","draco","maxDepth","token","generateTextures","generateBoundingVolumes","mergeMaterials","metadataClass","analyze","JSON","stringify","error","console","log","updateDumpFile","_this$options","deleteDumpFile","_this$options2","getRecord","fileName","setRecord","object","addNode","filename","nodeId","nodes","push","done","length","updateDoneStatus","resourceType","value","_this$tilesConverted$","nodeDump","find","element","updateConvertedTilesDump","changedRecords","writeResults","i","sourceId","outputId","node","key"],"sources":["../../../src/lib/utils/conversion-dump.ts"],"sourcesContent":["import {DUMP_FILE_SUFFIX} from '../../constants';\nimport {removeFile, writeFile} from './file-utils';\nimport {join} from 'path';\n\nexport type ConversionDumpOptions = {\n inputUrl: string;\n outputPath: string;\n tilesetName: string;\n maxDepth: number;\n slpk: boolean;\n egmFilePath: string;\n token: string;\n draco: boolean;\n mergeMaterials: boolean;\n generateTextures: boolean;\n generateBoundingVolumes: boolean;\n metadataClass: string;\n analyze: boolean;\n};\n\ntype NodeDoneStatus = {\n nodeId: number;\n done: Record<string, boolean> | boolean;\n};\n\ntype TilesConverted = {\n nodes: NodeDoneStatus[];\n};\n\nexport class ConversionDump {\n /** Conversion options */\n private options?: ConversionDumpOptions;\n /** Tiles conversion progress status map */\n tilesConverted: Record<string, TilesConverted>;\n\n constructor() {\n this.tilesConverted = {};\n }\n\n /**\n * Create a dump file with convertion options\n * @param options - converter options\n */\n async createDumpFile(options: ConversionDumpOptions): Promise<void> {\n const {\n tilesetName,\n slpk,\n egmFilePath,\n inputUrl,\n outputPath,\n draco = true,\n maxDepth,\n token,\n generateTextures,\n generateBoundingVolumes,\n mergeMaterials = true,\n metadataClass,\n analyze = false\n } = options;\n this.options = {\n tilesetName,\n slpk,\n egmFilePath,\n inputUrl,\n outputPath,\n draco,\n maxDepth,\n token,\n generateTextures,\n generateBoundingVolumes,\n mergeMaterials,\n metadataClass,\n analyze\n };\n\n try {\n await writeFile(\n options.outputPath,\n JSON.stringify({options: this.options}),\n `${options.tilesetName}${DUMP_FILE_SUFFIX}`\n );\n } catch (error) {\n console.log(\"Can't create dump file\", error);\n }\n }\n\n /**\n * Update conversion status in the dump file\n */\n private async updateDumpFile(): Promise<void> {\n if (this.options?.outputPath && this.options.tilesetName) {\n try {\n await writeFile(\n this.options.outputPath,\n JSON.stringify({\n options: this.options,\n tilesConverted: this.tilesConverted\n }),\n `${this.options.tilesetName}${DUMP_FILE_SUFFIX}`\n );\n } catch (error) {\n console.log(\"Can't update dump file\", error);\n }\n }\n }\n\n /**\n * Delete a dump file\n */\n async deleteDumpFile(): Promise<void> {\n if (this.options?.outputPath && this.options.tilesetName) {\n await removeFile(\n join(this.options.outputPath, `${this.options.tilesetName}${DUMP_FILE_SUFFIX}`)\n );\n }\n }\n\n /**\n * Get record from the tilesConverted Map\n * @param fileName - source filename\n * @returns existing object from the tilesConverted Map\n */\n private getRecord(fileName: string) {\n return this.tilesConverted[fileName];\n }\n\n /**\n * Set a record for the dump file\n * @param fileName - key 
- source filename\n * @param object - value\n */\n private setRecord(fileName: string, object: any) {\n this.tilesConverted[fileName] = object;\n }\n\n /**\n * Add a node into the dump file for the source file record\n * @param fileName - source filename\n * @param nodeId - nodeId of the node\n */\n async addNode(filename: string, nodeId: number) {\n const {nodes} = this.getRecord(filename) || {nodes: []};\n nodes.push({nodeId, done: {}});\n if (nodes.length === 1) {\n this.setRecord(filename, {nodes});\n }\n await this.updateDumpFile();\n }\n\n /**\n * Update done status object for the writing resources\n * @param fileName - key - source filename\n * @param nodeId - nodeId for the source filename\n * @param resourceType - resource type to update status\n * @param value - value\n */\n updateDoneStatus(filename: string, nodeId: number, resourceType: string, value: boolean) {\n const nodeDump = this.tilesConverted[filename]?.nodes.find(\n (element) => element.nodeId === nodeId\n );\n if (nodeDump) {\n nodeDump.done[resourceType] = value;\n }\n }\n\n /**\n * Update dump file according to writing results\n * @param changedRecords - array of parameters ids for the written resources\n * @param writeResults - array of writing resource files results\n */\n async updateConvertedTilesDump(\n changedRecords: {outputId?: number; sourceId?: string; resourceType?: string}[],\n writeResults: PromiseSettledResult<string | null>[]\n ) {\n for (let i = 0; i < changedRecords.length; i++) {\n if (changedRecords[i] && 'value' in writeResults[i]) {\n const {sourceId, resourceType, outputId} = changedRecords[i];\n if (!sourceId || !resourceType || !outputId) continue;\n for (const node of this.tilesConverted[sourceId].nodes) {\n if (typeof node.done !== 'boolean' && node.nodeId === outputId) {\n node.done[resourceType] = true;\n }\n if (typeof node.done !== 'boolean') {\n let done = false;\n for (const key in node.done) {\n done = node.done[key];\n if (!done) break;\n }\n if (done) {\n node.done = true;\n }\n }\n }\n }\n }\n await this.updateDumpFile();\n 
}\n}\n"],"mappings":"SAAQA,gBAAgB;AAAA,SAChBC,UAAU,EAAEC,SAAS;AAC7B,SAAQC,IAAI,QAAO,MAAM;AA2BzB,OAAO,MAAMC,cAAc,CAAC;EAM1BC,WAAWA,CAAA,EAAG;IAAA,KAJNC,OAAO;IAAA,KAEfC,cAAc;IAGZ,IAAI,CAACA,cAAc,GAAG,CAAC,CAAC;EAC1B;EAMA,MAAMC,cAAcA,CAACF,OAA8B,EAAiB;IAClE,MAAM;MACJG,WAAW;MACXC,IAAI;MACJC,WAAW;MACXC,QAAQ;MACRC,UAAU;MACVC,KAAK,GAAG,IAAI;MACZC,QAAQ;MACRC,KAAK;MACLC,gBAAgB;MAChBC,uBAAuB;MACvBC,cAAc,GAAG,IAAI;MACrBC,aAAa;MACbC,OAAO,GAAG;IACZ,CAAC,GAAGf,OAAO;IACX,IAAI,CAACA,OAAO,GAAG;MACbG,WAAW;MACXC,IAAI;MACJC,WAAW;MACXC,QAAQ;MACRC,UAAU;MACVC,KAAK;MACLC,QAAQ;MACRC,KAAK;MACLC,gBAAgB;MAChBC,uBAAuB;MACvBC,cAAc;MACdC,aAAa;MACbC;IACF,CAAC;IAED,IAAI;MACF,MAAMnB,SAAS,CACbI,OAAO,CAACO,UAAU,EAClBS,IAAI,CAACC,SAAS,CAAC;QAACjB,OAAO,EAAE,IAAI,CAACA;MAAO,CAAC,CAAC,EACtC,GAAEA,OAAO,CAACG,WAAY,GAAET,gBAAiB,EAC5C,CAAC;IACH,CAAC,CAAC,OAAOwB,KAAK,EAAE;MACdC,OAAO,CAACC,GAAG,CAAC,wBAAwB,EAAEF,KAAK,CAAC;IAC9C;EACF;EAKA,MAAcG,cAAcA,CAAA,EAAkB;IAAA,IAAAC,aAAA;IAC5C,IAAI,CAAAA,aAAA,OAAI,CAACtB,OAAO,cAAAsB,aAAA,eAAZA,aAAA,CAAcf,UAAU,IAAI,IAAI,CAACP,OAAO,CAACG,WAAW,EAAE;MACxD,IAAI;QACF,MAAMP,SAAS,CACb,IAAI,CAACI,OAAO,CAACO,UAAU,EACvBS,IAAI,CAACC,SAAS,CAAC;UACbjB,OAAO,EAAE,IAAI,CAACA,OAAO;UACrBC,cAAc,EAAE,IAAI,CAACA;QACvB,CAAC,CAAC,EACD,GAAE,IAAI,CAACD,OAAO,CAACG,WAAY,GAAET,gBAAiB,EACjD,CAAC;MACH,CAAC,CAAC,OAAOwB,KAAK,EAAE;QACdC,OAAO,CAACC,GAAG,CAAC,wBAAwB,EAAEF,KAAK,CAAC;MAC9C;IACF;EACF;EAKA,MAAMK,cAAcA,CAAA,EAAkB;IAAA,IAAAC,cAAA;IACpC,IAAI,CAAAA,cAAA,OAAI,CAACxB,OAAO,cAAAwB,cAAA,eAAZA,cAAA,CAAcjB,UAAU,IAAI,IAAI,CAACP,OAAO,CAACG,WAAW,EAAE;MACxD,MAAMR,UAAU,CACdE,IAAI,CAAC,IAAI,CAACG,OAAO,CAACO,UAAU,EAAG,GAAE,IAAI,CAACP,OAAO,CAACG,WAAY,GAAET,gBAAiB,EAAC,CAChF,CAAC;IACH;EACF;EAOQ+B,SAASA,CAACC,QAAgB,EAAE;IAClC,OAAO,IAAI,CAACzB,cAAc,CAACyB,QAAQ,CAAC;EACtC;EAOQC,SAASA,CAACD,QAAgB,EAAEE,MAAW,EAAE;IAC/C,IAAI,CAAC3B,cAAc,CAACyB,QAAQ,CAAC,GAAGE,MAAM;EACxC;EAOA,MAAMC,OAAOA,CAACC,QAAgB,EAAEC,MAAc,EAAE;IAC9C,MAAM;MAACC;IAAK,CAAC,GAAG,IAAI,CAACP,SAAS,CAACK,QAAQ,CAAC,IAAI;MAACE,KAAK,EAAE;IAAE,CAAC;IACvDA,KAAK,CAACC,IAAI,CAAC;MAACF,MAAM;MAAEG,IAAI,EAAE,CAAC;IAAC,CAAC,CAAC;IAC9B,IAAIF,KAAK,CAACG,MAAM,KAAK,CAAC,EAAE;MACtB,IAAI,CAACR,SAAS,CAACG,QAAQ,EAAE;QAACE;MAAK,CAAC,CAAC;IACnC;IACA,MAAM,IAAI,CAACX,cAAc,CAAC,CAAC;EAC7B;EASAe,gBAAgBA,CAACN,QAAgB,EAAEC,MAAc,EAAEM,YAAoB,EAAEC,KAAc,EAAE;IAAA,IAAAC,qBAAA;IACvF,MAAMC,QAAQ,IAAAD,qBAAA,GAAG,IAAI,CAACtC,cAAc,CAAC6B,QAAQ,CAAC,cAAAS,qBAAA,uBAA7BA,qBAAA,CAA+BP,KAAK,CAACS,IAAI,CACvDC,OAAO,IAAKA,OAAO,CAACX,MAAM,KAAKA,MAClC,CAAC;IACD,IAAIS,QAAQ,EAAE;MACZA,QAAQ,CAACN,IAAI,CAACG,YAAY,CAAC,GAAGC,KAAK;IACrC;EACF;EAOA,MAAMK,wBAAwBA,CAC5BC,cAA+E,EAC/EC,YAAmD,EACnD;IACA,KAAK,IAAIC,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGF,cAAc,CAACT,MAAM,EAAEW,CAAC,EAAE,EAAE;MAC9C,IAAIF,cAAc,CAACE,CAAC,CAAC,IAAI,OAAO,IAAID,YAAY,CAACC,CAAC,CAAC,EAAE;QACnD,MAAM;UAACC,QAAQ;UAAEV,YAAY;UAAEW;QAAQ,CAAC,GAAGJ,cAAc,CAACE,CAAC,CAAC;QAC5D,IAAI,CAACC,QAAQ,IAAI,CAACV,YAAY,IAAI,CAACW,QAAQ,EAAE;QAC7C,KAAK,MAAMC,IAAI,IAAI,IAAI,CAAChD,cAAc,CAAC8C,QAAQ,CAAC,CAACf,KAAK,EAAE;UACtD,IAAI,OAAOiB,IAAI,CAACf,IAAI,KAAK,SAAS,IAAIe,IAAI,CAAClB,MAAM,KAAKiB,QAAQ,EAAE;YAC9DC,IAAI,CAACf,IAAI,CAACG,YAAY,CAAC,GAAG,IAAI;UAChC;UACA,IAAI,OAAOY,IAAI,CAACf,IAAI,KAAK,SAAS,EAAE;YAClC,IAAIA,IAAI,GAAG,KAAK;YAChB,KAAK,MAAMgB,GAAG,IAAID,IAAI,CAACf,IAAI,EAAE;cAC3BA,IAAI,GAAGe,IAAI,CAACf,IAAI,CAACgB,GAAG,CAAC;cACrB,IAAI,CAAChB,IAAI,EAAE;YACb;YACA,IAAIA,IAAI,EAAE;cACRe,IAAI,CAACf,IAAI,GAAG,IAAI;YAClB;UACF;QACF;MACF;IACF;IACA,MAAM,IAAI,CAACb,cAAc,CAAC,CAAC;EAC7B;AACF"}

package/dist/lib/utils/statistic-utills.d.ts
CHANGED
@@ -1,8 +1,25 @@
 /**
- *
- * @param
- * @
+ * Do milliseconds time conversion to readable time string.
+ * @param tile - 3d-tiles tile Object
+ * @param coordinates - node converted coordinates
+ * @returns String which characterizes conversion time period
 */
-export
-
-
+export function timeConverter(time: [number, number]): String;
+
+/**
+ * Calculate files sizes after conversion.
+ * @param params - Object with params of conversion.
+ * @returns Promise with generated files size in bytes.
+ */
+export function calculateFilesSize(params: {
+    slpk: boolean;
+    outputPath: string;
+    tilesetName: string;
+}): Number;
+
+/**
+ * Reqursivelly calculate files sizes in directory.
+ * @param dirPath - Directory path.
+ * @returns Promise with files size in directory.
+ */
+export function getTotalFilesSize(dirPath: string): Number;
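For context, a hedged sketch of calling the helpers declared above (relative import assumed; `timeConverter` takes a `process.hrtime()`-style tuple per the signature, and the `await` on `calculateFilesSize` follows the JSDoc even though the declared return type is `Number`):

```typescript
import process from 'process';
import {timeConverter, calculateFilesSize} from './lib/utils/statistic-utills';

async function reportConversionStats(startTime: [number, number]): Promise<void> {
  // Human-readable elapsed time from an hrtime [seconds, nanoseconds] tuple.
  const elapsed = timeConverter(process.hrtime(startTime));
  // Total size in bytes of the generated tileset (or SLPK) on disk; values are placeholders.
  const bytes = await calculateFilesSize({slpk: false, outputPath: './output', tilesetName: 'my-tileset'});
  console.log(`Conversion time: ${elapsed}; output size: ${bytes} bytes`);
}
```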

package/dist/lib/utils/write-queue.d.ts
CHANGED
@@ -1,6 +1,10 @@
 import { Queue } from './queue';
+import { ConversionDump } from './conversion-dump';
 export type WriteQueueItem = {
     archiveKey?: string;
+    sourceId?: string;
+    outputId?: number;
+    resourceType?: string;
     /**
      * writePromise() returns a Promise that will be awaited in Promise.allSettled(promises);
      * Arguments for this call are specified in writeQueue.enqueue call like this:
@@ -21,13 +25,14 @@ export type WriteQueueItem = {
 };
 export default class WriteQueue<T extends WriteQueueItem> extends Queue<T> {
     private intervalId?;
+    private conversionDump;
     writePromise: Promise<void> | null;
     fileMap: {
         [key: string]: string;
     };
     listeningInterval: number;
     writeConcurrency: number;
-    constructor(listeningInterval?: number, writeConcurrency?: number);
+    constructor(conversionDump: ConversionDump, listeningInterval?: number, writeConcurrency?: number);
     enqueue(val: T, writeImmediately?: boolean): Promise<void>;
     startListening(): void;
     stopListening(): void;
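The breaking detail here is the constructor: the queue now takes a `ConversionDump` as its first argument, ahead of the existing interval and concurrency parameters. A minimal sketch, assuming relative imports within the package and the default values visible in the compiled output below (2000 ms interval, 400 concurrent writes):

```typescript
import WriteQueue, {WriteQueueItem} from './lib/utils/write-queue';
import {ConversionDump} from './lib/utils/conversion-dump';

// Previously the queue was created as `new WriteQueue(2000, 400)`;
// the dump tracker is now passed first, with the same optional values after it.
const conversionDump = new ConversionDump();
const writeQueue = new WriteQueue<WriteQueueItem>(conversionDump, 2000, 400);
writeQueue.startListening();
```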

package/dist/lib/utils/write-queue.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"write-queue.d.ts","sourceRoot":"","sources":["../../../src/lib/utils/write-queue.ts"],"names":[],"mappings":"AAAA,OAAO,EAAC,KAAK,EAAC,MAAM,SAAS,CAAC;
+
{"version":3,"file":"write-queue.d.ts","sourceRoot":"","sources":["../../../src/lib/utils/write-queue.ts"],"names":[],"mappings":"AAAA,OAAO,EAAC,KAAK,EAAC,MAAM,SAAS,CAAC;AAE9B,OAAO,EAAC,cAAc,EAAC,MAAM,mBAAmB,CAAC;AAKjD,MAAM,MAAM,cAAc,GAAG;IAC3B,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB;;;;;;;;;;;;;;;OAeG;IACH,YAAY,EAAE,MAAM,OAAO,CAAC,MAAM,GAAG,IAAI,CAAC,CAAC;CAC5C,CAAC;AAEF,MAAM,CAAC,OAAO,OAAO,UAAU,CAAC,CAAC,SAAS,cAAc,CAAE,SAAQ,KAAK,CAAC,CAAC,CAAC;IACxE,OAAO,CAAC,UAAU,CAAC,CAAiB;IACpC,OAAO,CAAC,cAAc,CAAiB;IAChC,YAAY,EAAE,OAAO,CAAC,IAAI,CAAC,GAAG,IAAI,CAAQ;IAC1C,OAAO,EAAE;QAAC,CAAC,GAAG,EAAE,MAAM,GAAG,MAAM,CAAA;KAAC,CAAM;IACtC,iBAAiB,EAAE,MAAM,CAAC;IAC1B,gBAAgB,EAAE,MAAM,CAAC;gBAG9B,cAAc,EAAE,cAAc,EAC9B,iBAAiB,GAAE,MAAa,EAChC,gBAAgB,GAAE,MAAY;IAQ1B,OAAO,CAAC,GAAG,EAAE,CAAC,EAAE,gBAAgB,GAAE,OAAe;IAgBvD,cAAc;IAId,aAAa;IAMP,UAAU,IAAI,OAAO,CAAC,IAAI,CAAC;IAQ3B,QAAQ,IAAI,OAAO,CAAC,IAAI,CAAC;YAKjB,OAAO;IAsBrB,OAAO,CAAC,aAAa;CAWtB"}

package/dist/lib/utils/write-queue.js
CHANGED
@@ -2,15 +2,17 @@ import { Queue } from "./queue.js";
 import process from 'process';
 const MEMORY_LIMIT = 4 * 1024 * 1024 * 1024;
 export default class WriteQueue extends Queue {
-  constructor() {
-    let listeningInterval = arguments.length >
-    let writeConcurrency = arguments.length >
+  constructor(conversionDump) {
+    let listeningInterval = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 2000;
+    let writeConcurrency = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 400;
     super();
     this.intervalId = void 0;
+    this.conversionDump = void 0;
     this.writePromise = null;
     this.fileMap = {};
     this.listeningInterval = void 0;
     this.writeConcurrency = void 0;
+    this.conversionDump = conversionDump;
     this.listeningInterval = listeningInterval;
     this.writeConcurrency = writeConcurrency;
   }
@@ -55,6 +57,7 @@ export default class WriteQueue extends Queue {
     while (this.length) {
       const promises = [];
       const archiveKeys = [];
+      const changedRecords = [];
       for (let i = 0; i < this.writeConcurrency; i++) {
        const item = this.dequeue();
        if (!item) {
@@ -62,14 +65,23 @@ export default class WriteQueue extends Queue {
         }
         const {
           archiveKey,
+          sourceId,
+          outputId,
+          resourceType,
           writePromise
         } = item;
         archiveKeys.push(archiveKey);
+        changedRecords.push({
+          sourceId,
+          outputId,
+          resourceType
+        });
         const promise = writePromise();
         promises.push(promise);
       }
       const writeResults = await Promise.allSettled(promises);
       this.updateFileMap(archiveKeys, writeResults);
+      await this.conversionDump.updateConvertedTilesDump(changedRecords, writeResults);
     }
   }
   updateFileMap(archiveKeys, writeResults) {
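Items enqueued with the new `sourceId`/`outputId`/`resourceType` fields are collected into `changedRecords` and handed to `ConversionDump.updateConvertedTilesDump()` after each `Promise.allSettled` batch. A hedged usage sketch (the item values and the no-op write promise are placeholders; only the item shape and the deferred-promise requirement come from the diff):

```typescript
import WriteQueue, {WriteQueueItem} from './lib/utils/write-queue';
import {ConversionDump} from './lib/utils/conversion-dump';

async function queueNodePage(): Promise<void> {
  const dump = new ConversionDump();
  const queue = new WriteQueue<WriteQueueItem>(dump);

  await queue.enqueue({
    archiveKey: 'nodePages/0.json.gz',
    sourceId: 'tile-0.b3dm',   // key into dump.tilesConverted (hypothetical value)
    outputId: 1,               // nodeId previously registered via dump.addNode()
    resourceType: 'nodePages', // done-status entry flipped when the write settles
    // Deferred factory, as the WriteQueueItem comment requires; a real call would
    // return the written file name instead of null.
    writePromise: async () => null
  });
  // Flushes the queue and updates both fileMap and the conversion dump.
  await queue.finalize();
}
```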

package/dist/lib/utils/write-queue.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"write-queue.js","names":["Queue","process","MEMORY_LIMIT","WriteQueue","constructor","listeningInterval","arguments","length","undefined","writeConcurrency","intervalId","writePromise","fileMap","enqueue","val","writeImmediately","archiveKey","result","memoryUsage","rss","startWrite","startListening","setInterval","bind","stopListening","clearInterval","doWrite","finalize","promises","archiveKeys","i","item","dequeue","push","promise","writeResults","Promise","allSettled","updateFileMap","value"],"sources":["../../../src/lib/utils/write-queue.ts"],"sourcesContent":["import {Queue} from './queue';\nimport process from 'process';\n\n/** Memory limit size is based on testing */\nconst MEMORY_LIMIT = 4 * 1024 * 1024 * 1024; // 4GB\n\nexport type WriteQueueItem = {\n archiveKey?: string;\n /**\n * writePromise() returns a Promise that will be awaited in Promise.allSettled(promises);\n * Arguments for this call are specified in writeQueue.enqueue call like this:\n * await writeQueue.enqueue({\n * archiveKey: `nodePages/xxx.json.gz`,\n * writePromise: () => writeFileForSlpk(slpkPath, data, `xxx.json`)\n * });\n * Note, a function like writeFileForSlpk should NOT be called when initializing the object for enqueue().\n * If he function is called, the promise will be created\n * and the function will allocate resources (file descriptors) for file writing.\n * It will be done for ALL items in the queue, which is not supposed to happen.\n * That's why the function should be passed as\n * writePromise: () => writeFileForSlpk(slpkPath, content, `xxx.json`)\n * instead of\n * writePromise: writeFileForSlpk(slpkPath, content, `xxx.json`) // INCORRECT !\n */\n writePromise: () => Promise<string | null>;\n};\n\nexport default class WriteQueue<T extends WriteQueueItem> extends Queue<T> {\n private intervalId?: NodeJS.Timeout;\n public writePromise: Promise<void> | null = null;\n public fileMap: {[key: string]: string} = {};\n public listeningInterval: number;\n public writeConcurrency: number;\n\n constructor(listeningInterval: number = 2000
+
{"version":3,"file":"write-queue.js","names":["Queue","process","MEMORY_LIMIT","WriteQueue","constructor","conversionDump","listeningInterval","arguments","length","undefined","writeConcurrency","intervalId","writePromise","fileMap","enqueue","val","writeImmediately","archiveKey","result","memoryUsage","rss","startWrite","startListening","setInterval","bind","stopListening","clearInterval","doWrite","finalize","promises","archiveKeys","changedRecords","i","item","dequeue","sourceId","outputId","resourceType","push","promise","writeResults","Promise","allSettled","updateFileMap","updateConvertedTilesDump","value"],"sources":["../../../src/lib/utils/write-queue.ts"],"sourcesContent":["import {Queue} from './queue';\nimport process from 'process';\nimport {ConversionDump} from './conversion-dump';\n\n/** Memory limit size is based on testing */\nconst MEMORY_LIMIT = 4 * 1024 * 1024 * 1024; // 4GB\n\nexport type WriteQueueItem = {\n archiveKey?: string;\n sourceId?: string;\n outputId?: number;\n resourceType?: string;\n /**\n * writePromise() returns a Promise that will be awaited in Promise.allSettled(promises);\n * Arguments for this call are specified in writeQueue.enqueue call like this:\n * await writeQueue.enqueue({\n * archiveKey: `nodePages/xxx.json.gz`,\n * writePromise: () => writeFileForSlpk(slpkPath, data, `xxx.json`)\n * });\n * Note, a function like writeFileForSlpk should NOT be called when initializing the object for enqueue().\n * If he function is called, the promise will be created\n * and the function will allocate resources (file descriptors) for file writing.\n * It will be done for ALL items in the queue, which is not supposed to happen.\n * That's why the function should be passed as\n * writePromise: () => writeFileForSlpk(slpkPath, content, `xxx.json`)\n * instead of\n * writePromise: writeFileForSlpk(slpkPath, content, `xxx.json`) // INCORRECT !\n */\n writePromise: () => Promise<string | null>;\n};\n\nexport default class WriteQueue<T extends WriteQueueItem> extends Queue<T> {\n private intervalId?: NodeJS.Timeout;\n private conversionDump: ConversionDump;\n public writePromise: Promise<void> | null = null;\n public fileMap: {[key: string]: string} = {};\n public listeningInterval: number;\n public writeConcurrency: number;\n\n constructor(\n conversionDump: ConversionDump,\n listeningInterval: number = 2000,\n writeConcurrency: number = 400\n ) {\n super();\n this.conversionDump = conversionDump;\n this.listeningInterval = listeningInterval;\n this.writeConcurrency = writeConcurrency;\n }\n\n async enqueue(val: T, writeImmediately: boolean = false) {\n if (writeImmediately) {\n const {archiveKey, writePromise} = val as WriteQueueItem;\n const result = await writePromise();\n if (archiveKey && result) {\n this.fileMap[archiveKey] = result;\n }\n } else {\n super.enqueue(val);\n /** https://nodejs.org/docs/latest-v14.x/api/process.html#process_process_memoryusage */\n if (process.memoryUsage().rss > MEMORY_LIMIT) {\n await this.startWrite();\n }\n }\n }\n\n startListening() {\n this.intervalId = setInterval(this.startWrite.bind(this), this.listeningInterval);\n }\n\n stopListening() {\n if (this.intervalId) {\n clearInterval(this.intervalId);\n }\n }\n\n async startWrite(): Promise<void> {\n if (!this.writePromise) {\n this.writePromise = this.doWrite();\n }\n await this.writePromise;\n this.writePromise = null;\n }\n\n async finalize(): Promise<void> {\n this.stopListening();\n await this.startWrite();\n }\n\n private async doWrite(): Promise<void> {\n while 
(this.length) {\n const promises: Promise<string | null>[] = [];\n const archiveKeys: (string | undefined)[] = [];\n const changedRecords: {outputId?: number; sourceId?: string; resourceType?: string}[] = [];\n for (let i = 0; i < this.writeConcurrency; i++) {\n const item = this.dequeue();\n if (!item) {\n break;\n }\n const {archiveKey, sourceId, outputId, resourceType, writePromise} = item as WriteQueueItem;\n archiveKeys.push(archiveKey);\n changedRecords.push({sourceId, outputId, resourceType});\n const promise = writePromise();\n promises.push(promise);\n }\n const writeResults = await Promise.allSettled(promises);\n this.updateFileMap(archiveKeys, writeResults);\n await this.conversionDump.updateConvertedTilesDump(changedRecords, writeResults);\n }\n }\n\n private updateFileMap(\n archiveKeys: (string | undefined)[],\n writeResults: PromiseSettledResult<string | null>[]\n ) {\n for (let i = 0; i < archiveKeys.length; i++) {\n const archiveKey = archiveKeys[i];\n if (archiveKey && 'value' in writeResults[i]) {\n this.fileMap[archiveKey] = (writeResults[i] as PromiseFulfilledResult<string>).value;\n }\n }\n }\n}\n"],"mappings":"SAAQA,KAAK;AACb,OAAOC,OAAO,MAAM,SAAS;AAI7B,MAAMC,YAAY,GAAG,CAAC,GAAG,IAAI,GAAG,IAAI,GAAG,IAAI;AA0B3C,eAAe,MAAMC,UAAU,SAAmCH,KAAK,CAAI;EAQzEI,WAAWA,CACTC,cAA8B,EAG9B;IAAA,IAFAC,iBAAyB,GAAAC,SAAA,CAAAC,MAAA,QAAAD,SAAA,QAAAE,SAAA,GAAAF,SAAA,MAAG,IAAI;IAAA,IAChCG,gBAAwB,GAAAH,SAAA,CAAAC,MAAA,QAAAD,SAAA,QAAAE,SAAA,GAAAF,SAAA,MAAG,GAAG;IAE9B,KAAK,CAAC,CAAC;IAAC,KAZFI,UAAU;IAAA,KACVN,cAAc;IAAA,KACfO,YAAY,GAAyB,IAAI;IAAA,KACzCC,OAAO,GAA4B,CAAC,CAAC;IAAA,KACrCP,iBAAiB;IAAA,KACjBI,gBAAgB;IAQrB,IAAI,CAACL,cAAc,GAAGA,cAAc;IACpC,IAAI,CAACC,iBAAiB,GAAGA,iBAAiB;IAC1C,IAAI,CAACI,gBAAgB,GAAGA,gBAAgB;EAC1C;EAEA,MAAMI,OAAOA,CAACC,GAAM,EAAqC;IAAA,IAAnCC,gBAAyB,GAAAT,SAAA,CAAAC,MAAA,QAAAD,SAAA,QAAAE,SAAA,GAAAF,SAAA,MAAG,KAAK;IACrD,IAAIS,gBAAgB,EAAE;MACpB,MAAM;QAACC,UAAU;QAAEL;MAAY,CAAC,GAAGG,GAAqB;MACxD,MAAMG,MAAM,GAAG,MAAMN,YAAY,CAAC,CAAC;MACnC,IAAIK,UAAU,IAAIC,MAAM,EAAE;QACxB,IAAI,CAACL,OAAO,CAACI,UAAU,CAAC,GAAGC,MAAM;MACnC;IACF,CAAC,MAAM;MACL,KAAK,CAACJ,OAAO,CAACC,GAAG,CAAC;MAElB,IAAId,OAAO,CAACkB,WAAW,CAAC,CAAC,CAACC,GAAG,GAAGlB,YAAY,EAAE;QAC5C,MAAM,IAAI,CAACmB,UAAU,CAAC,CAAC;MACzB;IACF;EACF;EAEAC,cAAcA,CAAA,EAAG;IACf,IAAI,CAACX,UAAU,GAAGY,WAAW,CAAC,IAAI,CAACF,UAAU,CAACG,IAAI,CAAC,IAAI,CAAC,EAAE,IAAI,CAAClB,iBAAiB,CAAC;EACnF;EAEAmB,aAAaA,CAAA,EAAG;IACd,IAAI,IAAI,CAACd,UAAU,EAAE;MACnBe,aAAa,CAAC,IAAI,CAACf,UAAU,CAAC;IAChC;EACF;EAEA,MAAMU,UAAUA,CAAA,EAAkB;IAChC,IAAI,CAAC,IAAI,CAACT,YAAY,EAAE;MACtB,IAAI,CAACA,YAAY,GAAG,IAAI,CAACe,OAAO,CAAC,CAAC;IACpC;IACA,MAAM,IAAI,CAACf,YAAY;IACvB,IAAI,CAACA,YAAY,GAAG,IAAI;EAC1B;EAEA,MAAMgB,QAAQA,CAAA,EAAkB;IAC9B,IAAI,CAACH,aAAa,CAAC,CAAC;IACpB,MAAM,IAAI,CAACJ,UAAU,CAAC,CAAC;EACzB;EAEA,MAAcM,OAAOA,CAAA,EAAkB;IACrC,OAAO,IAAI,CAACnB,MAAM,EAAE;MAClB,MAAMqB,QAAkC,GAAG,EAAE;MAC7C,MAAMC,WAAmC,GAAG,EAAE;MAC9C,MAAMC,cAA+E,GAAG,EAAE;MAC1F,KAAK,IAAIC,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAG,IAAI,CAACtB,gBAAgB,EAAEsB,CAAC,EAAE,EAAE;QAC9C,MAAMC,IAAI,GAAG,IAAI,CAACC,OAAO,CAAC,CAAC;QAC3B,IAAI,CAACD,IAAI,EAAE;UACT;QACF;QACA,MAAM;UAAChB,UAAU;UAAEkB,QAAQ;UAAEC,QAAQ;UAAEC,YAAY;UAAEzB;QAAY,CAAC,GAAGqB,IAAsB;QAC3FH,WAAW,CAACQ,IAAI,CAACrB,UAAU,CAAC;QAC5Bc,cAAc,CAACO,IAAI,CAAC;UAACH,QAAQ;UAAEC,QAAQ;UAAEC;QAAY,CAAC,CAAC;QACvD,MAAME,OAAO,GAAG3B,YAAY,CAAC,CAAC;QAC9BiB,QAAQ,CAACS,IAAI,CAACC,OAAO,CAAC;MACxB;MACA,MAAMC,YAAY,GAAG,MAAMC,OAAO,CAACC,UAAU,CAACb,QAAQ,CAAC;MACvD,IAAI,CAACc,aAAa,CAACb,WAAW,EAAEU,YAAY,CAAC;MAC7C,MAAM,IAAI,CAACnC,cAAc,CAACuC,wBAAwB,CAACb,cAAc,EAAES,YAAY,CAAC;IA
ClF;EACF;EAEQG,aAAaA,CACnBb,WAAmC,EACnCU,YAAmD,EACnD;IACA,KAAK,IAAIR,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGF,WAAW,CAACtB,MAAM,EAAEwB,CAAC,EAAE,EAAE;MAC3C,MAAMf,UAAU,GAAGa,WAAW,CAACE,CAAC,CAAC;MACjC,IAAIf,UAAU,IAAI,OAAO,IAAIuB,YAAY,CAACR,CAAC,CAAC,EAAE;QAC5C,IAAI,CAACnB,OAAO,CAACI,UAAU,CAAC,GAAIuB,YAAY,CAACR,CAAC,CAAC,CAAoCa,KAAK;MACtF;IACF;EACF;AACF"}
package/dist/pgm-loader.js
CHANGED
@@ -1,5 +1,5 @@
 import { Geoid, parsePGM } from '@math.gl/geoid';
-const VERSION = typeof
+const VERSION = typeof "4.1.0-alpha.10" !== 'undefined' ? "4.1.0-alpha.10" : 'latest';
 export { Geoid };
 export const PGMLoader = {
   name: 'PGM - Netpbm grayscale image format',
package/dist/pgm-loader.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"pgm-loader.js","names":["Geoid","parsePGM","VERSION","
+
{"version":3,"file":"pgm-loader.js","names":["Geoid","parsePGM","VERSION","PGMLoader","name","id","module","version","mimeTypes","parse","arrayBuffer","options","Uint8Array","pgm","extensions","cubic"],"sources":["../src/pgm-loader.ts"],"sourcesContent":["import type {LoaderWithParser, LoaderOptions} from '@loaders.gl/loader-utils';\nimport {Geoid, parsePGM} from '@math.gl/geoid';\n\n// __VERSION__ is injected by babel-plugin-version-inline\n// @ts-ignore TS2304: Cannot find name '__VERSION__'.\nconst VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest';\n\nexport {Geoid};\n\nexport type PGMLoaderOptions = LoaderOptions & {\n pgm?: {\n cubic?: boolean;\n };\n};\n\n/**\n * Loader for PGM - Netpbm grayscale image format\n */\nexport const PGMLoader: LoaderWithParser<Geoid, never, PGMLoaderOptions> = {\n name: 'PGM - Netpbm grayscale image format',\n id: 'pgm',\n module: 'tile-converter',\n version: VERSION,\n mimeTypes: ['image/x-portable-graymap'],\n parse: async (arrayBuffer, options) => parsePGM(new Uint8Array(arrayBuffer), options?.pgm || {}),\n extensions: ['pgm'],\n options: {\n pgm: {\n cubic: false\n }\n }\n};\n"],"mappings":"AACA,SAAQA,KAAK,EAAEC,QAAQ,QAAO,gBAAgB;AAI9C,MAAMC,OAAO,GAAG,uBAAkB,KAAK,WAAW,sBAAiB,QAAQ;AAE3E,SAAQF,KAAK;AAWb,OAAO,MAAMG,SAA2D,GAAG;EACzEC,IAAI,EAAE,qCAAqC;EAC3CC,EAAE,EAAE,KAAK;EACTC,MAAM,EAAE,gBAAgB;EACxBC,OAAO,EAAEL,OAAO;EAChBM,SAAS,EAAE,CAAC,0BAA0B,CAAC;EACvCC,KAAK,EAAE,MAAAA,CAAOC,WAAW,EAAEC,OAAO,KAAKV,QAAQ,CAAC,IAAIW,UAAU,CAACF,WAAW,CAAC,EAAE,CAAAC,OAAO,aAAPA,OAAO,uBAAPA,OAAO,CAAEE,GAAG,KAAI,CAAC,CAAC,CAAC;EAChGC,UAAU,EAAE,CAAC,KAAK,CAAC;EACnBH,OAAO,EAAE;IACPE,GAAG,EAAE;MACHE,KAAK,EAAE;IACT;EACF;AACF,CAAC"}