@srsergio/taptapp-ar 1.0.8 → 1.0.9
This diff shows the content of publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only and reflects the changes between the two versions.
- package/dist/compiler/offline-compiler.d.ts +5 -36
- package/dist/compiler/offline-compiler.js +75 -10
- package/dist/compiler/utils/worker-pool.d.ts +2 -1
- package/dist/compiler/utils/worker-pool.js +4 -8
- package/package.json +1 -1
- package/src/compiler/offline-compiler.js +83 -12
- package/src/compiler/utils/worker-pool.js +4 -8

package/dist/compiler/offline-compiler.d.ts CHANGED

@@ -3,7 +3,7 @@
     */
    export class OfflineCompiler {
        data: any;
-       workerPool:
+       workerPool: WorkerPool | null;
        _initNodeWorkers(): Promise<void>;
        /**
         * Compiles a list of target images
@@ -15,27 +15,11 @@ export class OfflineCompiler {
        /**
         * Compiles matching data using DetectorLite (pure JS)
         */
-       _compileMatch(targetImages: any, progressCallback: any): Promise<
-           maximaPoints: any[];
-           minimaPoints: any[];
-           maximaPointsCluster: {
-               rootNode: {
-                   centerPointIndex: any;
-               };
-           };
-           minimaPointsCluster: {
-               rootNode: {
-                   centerPointIndex: any;
-               };
-           };
-           width: any;
-           height: any;
-           scale: any;
-       }[][]>;
+       _compileMatch(targetImages: any, progressCallback: any): Promise<any[]>;
        /**
         * Compiles tracking data using extractTrackingFeatures (pure JS)
         */
-       _compileTrack(targetImages: any, progressCallback: any): Promise<
+       _compileTrack(targetImages: any, progressCallback: any): Promise<any[]>;
        /**
         * Public method for compiling tracking (kept for compatibility with the previous API)
         * @param {Object} options - Compilation options
@@ -56,23 +40,7 @@ export class OfflineCompiler {
            progressCallback: any;
            targetImages: any;
            basePercent?: number | undefined;
-       }): Promise<
-           maximaPoints: any[];
-           minimaPoints: any[];
-           maximaPointsCluster: {
-               rootNode: {
-                   centerPointIndex: any;
-               };
-           };
-           minimaPointsCluster: {
-               rootNode: {
-                   centerPointIndex: any;
-               };
-           };
-           width: any;
-           height: any;
-           scale: any;
-       }[][]>;
+       }): Promise<any[]>;
        /**
         * Exports compiled data in an optimized columnar binary format
         */
@@ -173,3 +141,4 @@ export class OfflineCompiler {
     */
    destroy(): Promise<void>;
    }
+   import { WorkerPool } from "./utils/worker-pool.js";

package/dist/compiler/offline-compiler.js CHANGED

@@ -19,6 +19,7 @@ import { extractTrackingFeatures } from "./tracker/extract-utils.js";
 import { DetectorLite } from "./detector/detector-lite.js";
 import { build as hierarchicalClusteringBuild } from "./matching/hierarchical-clustering.js";
 import * as msgpack from "@msgpack/msgpack";
+import { WorkerPool } from "./utils/worker-pool.js";
 // Detect environment
 const isNode = typeof process !== "undefined" &&
     process.versions != null &&
@@ -41,21 +42,27 @@ export class OfflineCompiler {
     }
     async _initNodeWorkers() {
         try {
-
-
-
-
-
+            // Use variables to prevent bundlers from trying to bundle these
+            const pathModule = "path";
+            const urlModule = "url";
+            const osModule = "os";
+            const workerThreadsModule = "node:worker_threads";
+            const [path, url, os, { Worker }] = await Promise.all([
+                import(pathModule),
+                import(urlModule),
+                import(osModule),
+                import(workerThreadsModule)
             ]);
             const __filename = url.fileURLToPath(import.meta.url);
             const __dirname = path.dirname(__filename);
             const workerPath = path.join(__dirname, "node-worker.js");
+            // Limit workers to avoid freezing system
             const numWorkers = Math.min(os.cpus().length, 4);
-            this.workerPool = new
+            this.workerPool = new WorkerPool(workerPath, numWorkers, Worker);
             console.log(`🚀 OfflineCompiler: Node.js mode with ${numWorkers} workers`);
         }
         catch (e) {
-            console.log("⚡ OfflineCompiler: Running without workers");
+            console.log("⚡ OfflineCompiler: Running without workers (initialization failed)", e);
         }
     }
     /**
@@ -120,9 +127,48 @@ export class OfflineCompiler {
     async _compileMatch(targetImages, progressCallback) {
         const percentPerImage = 100 / targetImages.length;
         let currentPercent = 0;
+        // Use workers if available
+        if (this.workerPool) {
+            const promises = targetImages.map((targetImage, index) => {
+                return this.workerPool.runTask({
+                    type: 'match',
+                    targetImage,
+                    percentPerImage,
+                    basePercent: index * percentPerImage,
+                    onProgress: (percent) => {
+                        // Basic aggregation: this assumes naive progress updates.
+                        // Ideally we should track exact progress per image.
+                        // For now, simpler to just let the main thread loop handle overall progress callback?
+                        // No, workers are async. We need to aggregate.
+                        // Actually, the main loop below is serial.
+                        // If we use workers, we run them in parallel.
+                    }
+                });
+            });
+            // Progress handling for parallel workers is tricky without a shared state manager.
+            // Simplified approach: each worker reports its absolute progress contribution?
+            // No, worker reports 'percent' which is base + local.
+            // We can use a shared loadedPercent variable.
+            let totalPercent = 0;
+            const progressMap = new Float32Array(targetImages.length);
+            const wrappedPromises = targetImages.map((targetImage, index) => {
+                return this.workerPool.runTask({
+                    type: 'match',
+                    targetImage,
+                    percentPerImage, // Not really needed for logic but worker expects it
+                    basePercent: 0, // Worker will report 0-percentPerImage roughly
+                    onProgress: (p) => {
+                        // This 'p' from worker is "base + local". If we passed base=0, it's just local (0 to percentPerImage)
+                        progressMap[index] = p;
+                        const sum = progressMap.reduce((a, b) => a + b, 0);
+                        progressCallback(sum);
+                    }
+                });
+            });
+            return Promise.all(wrappedPromises);
+        }
+        // Serial Fallback
         const results = [];
-        // Process sequentially to avoid worker overhead
-        // (workers are useful for many images, but they add latency)
         for (let i = 0; i < targetImages.length; i++) {
             const targetImage = targetImages[i];
             const imageList = buildImageList(targetImage);
@@ -157,6 +203,24 @@ export class OfflineCompiler {
     async _compileTrack(targetImages, progressCallback) {
         const percentPerImage = 100 / targetImages.length;
         let currentPercent = 0;
+        if (this.workerPool) {
+            const progressMap = new Float32Array(targetImages.length);
+            const wrappedPromises = targetImages.map((targetImage, index) => {
+                return this.workerPool.runTask({
+                    type: 'compile',
+                    targetImage,
+                    percentPerImage,
+                    basePercent: 0,
+                    onProgress: (p) => {
+                        progressMap[index] = p;
+                        const sum = progressMap.reduce((a, b) => a + b, 0);
+                        progressCallback(sum);
+                    }
+                });
+            });
+            return Promise.all(wrappedPromises);
+        }
+        // Serial Fallback
         const results = [];
         for (let i = 0; i < targetImages.length; i++) {
             const targetImage = targetImages[i];
@@ -231,7 +295,8 @@ export class OfflineCompiler {
         return msgpack.encode({
             v: CURRENT_VERSION,
             dataList,
-
+            // eslint-disable-next-line
+        }); // eslint-disable-line
     }
     _packKeyframe(kf) {
         return {
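
The rewritten _initNodeWorkers above loads Node built-ins through import() calls whose specifiers are plain runtime strings, so bundlers cannot resolve them statically and the same file remains loadable in browser builds, where the call simply throws and the compiler falls back to running without workers. A minimal standalone sketch of that pattern, assuming Node.js; the loadNodeWorkerDeps helper name is illustrative and not part of the package:

// Sketch: bundler-safe loading of Node built-ins via dynamic import.
// The specifiers are ordinary string variables, so static analysis in
// bundlers like webpack or Vite cannot follow them; the modules are
// only resolved at runtime, under Node.js.
async function loadNodeWorkerDeps() {
    const osModule = "node:os";
    const workerThreadsModule = "node:worker_threads";
    const [os, { Worker }] = await Promise.all([
        import(osModule),
        import(workerThreadsModule),
    ]);
    // Mirror the diff's cap of Math.min(cpus, 4) workers.
    const numWorkers = Math.min(os.cpus().length, 4);
    return { Worker, numWorkers };
}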

package/dist/compiler/utils/worker-pool.js CHANGED

@@ -1,9 +1,8 @@
-import { Worker } from 'node:worker_threads';
-import os from 'node:os';
 export class WorkerPool {
-    constructor(workerPath, poolSize
+    constructor(workerPath, poolSize, WorkerClass) {
         this.workerPath = workerPath;
         this.poolSize = poolSize;
+        this.WorkerClass = WorkerClass;
         this.workers = [];
         this.queue = [];
         this.activeWorkers = 0;
@@ -24,7 +23,7 @@ export class WorkerPool {
     }
     _createWorker() {
         this.activeWorkers++;
-        const worker = new
+        const worker = new this.WorkerClass(this.workerPath);
         return worker;
     }
     _executeTask(worker, task) {
@@ -62,10 +61,7 @@ export class WorkerPool {
                 serializableData[key] = value;
             }
         }
-        worker.postMessage(
-            type: 'compile',
-            ...serializableData
-        });
+        worker.postMessage(serializableData);
     }
     _finishTask(worker, callback, result) {
         if (this.queue.length > 0) {
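
With the node:worker_threads and node:os imports removed, WorkerPool now receives the Worker constructor from its caller, which is what keeps the dist bundle browser-safe. A hedged usage sketch under Node.js; the relative paths and task payload fields are assumptions taken from this diff, and the actual message contract lives in node-worker.js, which is not shown here:

// Sketch: constructing the refactored pool with an injected Worker class.
import { Worker } from "node:worker_threads";
import { WorkerPool } from "./worker-pool.js"; // assumed relative path

// Matches the new constructor(workerPath, poolSize, WorkerClass) signature.
const pool = new WorkerPool("./node-worker.js", 4, Worker);

// runTask returns a promise; the fields mirror those used by _compileMatch above.
const result = await pool.runTask({
    type: "match",
    targetImage: null, // an image object prepared by the caller
    percentPerImage: 100,
    basePercent: 0,
    onProgress: (p) => console.log(`progress: ${p.toFixed(1)}%`),
});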

package/package.json CHANGED

package/src/compiler/offline-compiler.js CHANGED

@@ -20,6 +20,7 @@ import { extractTrackingFeatures } from "./tracker/extract-utils.js";
 import { DetectorLite } from "./detector/detector-lite.js";
 import { build as hierarchicalClusteringBuild } from "./matching/hierarchical-clustering.js";
 import * as msgpack from "@msgpack/msgpack";
+import { WorkerPool } from "./utils/worker-pool.js";
 
 // Detect environment
 const isNode = typeof process !== "undefined" &&
@@ -46,22 +47,30 @@ export class OfflineCompiler {
 
     async _initNodeWorkers() {
         try {
-
-
-
-
-
+            // Use variables to prevent bundlers from trying to bundle these
+            const pathModule = "path";
+            const urlModule = "url";
+            const osModule = "os";
+            const workerThreadsModule = "node:worker_threads";
+
+            const [path, url, os, { Worker }] = await Promise.all([
+                import(pathModule),
+                import(urlModule),
+                import(osModule),
+                import(workerThreadsModule)
             ]);
 
             const __filename = url.fileURLToPath(import.meta.url);
             const __dirname = path.dirname(__filename);
             const workerPath = path.join(__dirname, "node-worker.js");
 
+            // Limit workers to avoid freezing system
             const numWorkers = Math.min(os.cpus().length, 4);
-
+
+            this.workerPool = new WorkerPool(workerPath, numWorkers, Worker);
             console.log(`🚀 OfflineCompiler: Node.js mode with ${numWorkers} workers`);
         } catch (e) {
-            console.log("⚡ OfflineCompiler: Running without workers");
+            console.log("⚡ OfflineCompiler: Running without workers (initialization failed)", e);
         }
     }
 
@@ -142,10 +151,53 @@ export class OfflineCompiler {
         const percentPerImage = 100 / targetImages.length;
         let currentPercent = 0;
 
-
+        // Use workers if available
+        if (this.workerPool) {
+            const promises = targetImages.map((targetImage, index) => {
+                return this.workerPool.runTask({
+                    type: 'match',
+                    targetImage,
+                    percentPerImage,
+                    basePercent: index * percentPerImage,
+                    onProgress: (percent) => {
+                        // Basic aggregation: this assumes naive progress updates.
+                        // Ideally we should track exact progress per image.
+                        // For now, simpler to just let the main thread loop handle overall progress callback?
+                        // No, workers are async. We need to aggregate.
+                        // Actually, the main loop below is serial.
+                        // If we use workers, we run them in parallel.
+                    }
+                });
+            });
+
+            // Progress handling for parallel workers is tricky without a shared state manager.
+            // Simplified approach: each worker reports its absolute progress contribution?
+            // No, worker reports 'percent' which is base + local.
+            // We can use a shared loadedPercent variable.
+
+            let totalPercent = 0;
+            const progressMap = new Float32Array(targetImages.length);
+
+            const wrappedPromises = targetImages.map((targetImage, index) => {
+                return this.workerPool.runTask({
+                    type: 'match',
+                    targetImage,
+                    percentPerImage, // Not really needed for logic but worker expects it
+                    basePercent: 0, // Worker will report 0-percentPerImage roughly
+                    onProgress: (p) => {
+                        // This 'p' from worker is "base + local". If we passed base=0, it's just local (0 to percentPerImage)
+                        progressMap[index] = p;
+                        const sum = progressMap.reduce((a, b) => a + b, 0);
+                        progressCallback(sum);
+                    }
+                });
+            });
+
+            return Promise.all(wrappedPromises);
+        }
 
-        //
-
+        // Serial Fallback
+        const results = [];
         for (let i = 0; i < targetImages.length; i++) {
             const targetImage = targetImages[i];
             const imageList = buildImageList(targetImage);
@@ -189,8 +241,26 @@ export class OfflineCompiler {
         const percentPerImage = 100 / targetImages.length;
         let currentPercent = 0;
 
-
+        if (this.workerPool) {
+            const progressMap = new Float32Array(targetImages.length);
+            const wrappedPromises = targetImages.map((targetImage, index) => {
+                return this.workerPool.runTask({
+                    type: 'compile',
+                    targetImage,
+                    percentPerImage,
+                    basePercent: 0,
+                    onProgress: (p) => {
+                        progressMap[index] = p;
+                        const sum = progressMap.reduce((a, b) => a + b, 0);
+                        progressCallback(sum);
+                    }
+                });
+            });
+            return Promise.all(wrappedPromises);
+        }
 
+        // Serial Fallback
+        const results = [];
         for (let i = 0; i < targetImages.length; i++) {
             const targetImage = targetImages[i];
             const imageList = buildTrackingImageList(targetImage);
@@ -274,7 +344,8 @@ export class OfflineCompiler {
         return msgpack.encode({
             v: CURRENT_VERSION,
             dataList,
-
+            // eslint-disable-next-line
+        }); // eslint-disable-line
     }
 
     _packKeyframe(kf) {
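
Both worker paths above report progress by storing each image's local percentage in a Float32Array slot and passing the running sum to progressCallback. The same aggregation pattern in isolation, written against a generic runTask function rather than the package's worker pool; all names here are illustrative:

// Sketch: aggregating per-task progress from parallel workers into one callback.
async function runWithAggregatedProgress(tasks, runTask, progressCallback) {
    const percentPerTask = 100 / tasks.length;
    const progress = new Float32Array(tasks.length);
    const promises = tasks.map((task, index) =>
        runTask(task, (localPercent) => {
            // Keep the latest local value for this task, clamped to its share,
            // and report the total across all tasks.
            progress[index] = Math.min(localPercent, percentPerTask);
            progressCallback(progress.reduce((a, b) => a + b, 0));
        })
    );
    return Promise.all(promises);
}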

package/src/compiler/utils/worker-pool.js CHANGED

@@ -1,10 +1,9 @@
-import { Worker } from 'node:worker_threads';
-import os from 'node:os';
 
 export class WorkerPool {
-    constructor(workerPath, poolSize
+    constructor(workerPath, poolSize, WorkerClass) {
         this.workerPath = workerPath;
         this.poolSize = poolSize;
+        this.WorkerClass = WorkerClass;
         this.workers = [];
         this.queue = [];
         this.activeWorkers = 0;
@@ -25,7 +24,7 @@ export class WorkerPool {
 
     _createWorker() {
         this.activeWorkers++;
-        const worker = new
+        const worker = new this.WorkerClass(this.workerPath);
         return worker;
     }
 
@@ -66,10 +65,7 @@ export class WorkerPool {
             }
         }
 
-        worker.postMessage(
-            type: 'compile',
-            ...serializableData
-        });
+        worker.postMessage(serializableData);
     }
 
     _finishTask(worker, callback, result) {