@danielsimonjr/mathts-parallel 0.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts +993 -0
- package/dist/index.js +953 -0
- package/package.json +57 -0
package/dist/index.js
ADDED
|
@@ -0,0 +1,953 @@
|
|
|
1
|
+
// src/ComputePool.ts
|
|
2
|
+
import {
|
|
3
|
+
MathWorkerPool,
|
|
4
|
+
Transfer
|
|
5
|
+
} from "@danielsimonjr/mathts-workerpool";
|
|
6
|
+
// Default configuration applied when a ComputePool is constructed without
// explicit overrides; caller options are shallow-merged over these values.
var DEFAULT_POOL_CONFIG = {
  enabled: true,
  minWorkers: 1,
  // Use the host's reported core count when available (browser / modern Node),
  // otherwise fall back to 4 workers.
  maxWorkers: typeof navigator !== "undefined" ? navigator.hardwareConcurrency || 4 : 4,
  thresholdElements: 50000, // element count above which work is parallelized
  chunkSize: 10000,         // elements handed to a worker per task
  workerType: "auto",
  workerIdleTimeout: 60000, // ms an idle worker survives before reclamation
  taskTimeout: 300000       // ms before a task is considered failed (5 minutes)
};
|
|
17
|
+
/**
 * Project a worker-pool result onto a plain ParallelResult object.
 * Exactly the four public fields are carried over; any extra properties on
 * the input are intentionally dropped.
 */
function toParallelResult(poolResult) {
  const { result, duration, chunks, parallelized } = poolResult;
  return { result, duration, chunks, parallelized };
}
|
|
25
|
+
/**
 * Translate a ComputePool configuration into the field names MathWorkerPool
 * expects (thresholdElements -> parallelThreshold,
 * workerIdleTimeout -> idleTimeout); all other fields pass through unchanged.
 */
function toWorkerConfig(config) {
  const {
    enabled,
    minWorkers,
    maxWorkers,
    thresholdElements: parallelThreshold,
    chunkSize,
    workerType,
    workerIdleTimeout: idleTimeout,
    taskTimeout
  } = config;
  return {
    enabled,
    minWorkers,
    maxWorkers,
    parallelThreshold,
    chunkSize,
    workerType,
    idleTimeout,
    taskTimeout
  };
}
|
|
37
|
+
// High-level facade over MathWorkerPool: normalizes configuration, exposes
// typed parallel math operations, and converts raw pool results into
// ParallelResult objects via toParallelResult.
var ComputePool = class {
  workerPool;
  config;
  /**
   * Build a pool: caller options are layered over DEFAULT_POOL_CONFIG, then
   * translated into MathWorkerPool's field names.
   */
  constructor(config = {}) {
    this.config = { ...DEFAULT_POOL_CONFIG, ...config };
    this.workerPool = new MathWorkerPool(toWorkerConfig(this.config));
  }
  /** Spin up the underlying workers. */
  async initialize() {
    await this.workerPool.initialize();
  }
  /** True once the underlying pool has finished initializing. */
  isReady() {
    return this.workerPool.isReady();
  }
  /** Whether work on `elementCount` elements is worth parallelizing. */
  shouldParallelize(elementCount) {
    return this.workerPool.shouldParallelize(elementCount);
  }
  /** Run an arbitrary named method on the worker pool. */
  async exec(method, params, options) {
    return this.workerPool.exec(method, params, options);
  }
  /** Snapshot of the underlying pool's statistics. */
  stats() {
    return this.workerPool.stats();
  }
  /** Sum of all elements. */
  async sum(data) {
    return toParallelResult(await this.workerPool.sum(data));
  }
  /** Dot product of two vectors. */
  async dot(a, b) {
    return toParallelResult(await this.workerPool.dot(a, b));
  }
  /** Element-wise binary operation `op` over two arrays. */
  async elementwise(a, b, op) {
    return toParallelResult(await this.workerPool.elementwise(a, b, op));
  }
  /** Multiply every element by `scalar`. */
  async scale(data, scalar) {
    return toParallelResult(await this.workerPool.scale(data, scalar));
  }
  /**
   * Matrix product of A (aRows x aCols) and B (aCols x bCols).
   *
   * @param a - First matrix as flat Float64Array (row-major)
   * @param aRows - Number of rows in A
   * @param aCols - Number of columns in A
   * @param b - Second matrix as flat Float64Array (row-major)
   * @param bCols - Number of columns in B
   */
  async matmul(a, aRows, aCols, b, bCols) {
    return toParallelResult(await this.workerPool.matmul(a, aRows, aCols, b, bCols));
  }
  /** Transpose of a rows x cols matrix. */
  async transpose(data, rows, cols) {
    return toParallelResult(await this.workerPool.transpose(data, rows, cols));
  }
  /** Apply `fn` to every element. */
  async map(data, fn) {
    return toParallelResult(await this.workerPool.map(data, fn));
  }
  /** Fold the array with `fn`, starting from `initial`. */
  async reduce(data, fn, initial) {
    return toParallelResult(await this.workerPool.reduce(data, fn, initial));
  }
  /** Keep only elements matching `predicate`. */
  async filter(data, predicate) {
    return toParallelResult(await this.workerPool.filter(data, predicate));
  }
  // =========================================================================
  // Statistical Operations
  // =========================================================================
  /** Minimum and maximum values in one pass. */
  async minMax(data) {
    return toParallelResult(await this.workerPool.minMax(data));
  }
  /** Variance along with mean and standard deviation. */
  async variance(data) {
    return toParallelResult(await this.workerPool.variance(data));
  }
  /** Euclidean (L2) norm. */
  async norm(data) {
    return toParallelResult(await this.workerPool.norm(data));
  }
  /** Euclidean distance between two vectors. */
  async distance(a, b) {
    return toParallelResult(await this.workerPool.distance(a, b));
  }
  /** Histogram with `bins` buckets over [min, max]. */
  async histogram(data, bins, min, max) {
    return toParallelResult(await this.workerPool.histogram(data, bins, min, max));
  }
  // =========================================================================
  // Unary Operations
  // =========================================================================
  /** Apply the named unary function to every element. */
  async unary(data, fn) {
    return toParallelResult(await this.workerPool.unary(data, fn));
  }
  /** Absolute value, element-wise. */
  async abs(data) {
    return this.unary(data, "abs");
  }
  /** Square root, element-wise. */
  async sqrt(data) {
    return this.unary(data, "sqrt");
  }
  /** Exponential, element-wise. */
  async exp(data) {
    return this.unary(data, "exp");
  }
  /** Natural logarithm, element-wise. */
  async log(data) {
    return this.unary(data, "log");
  }
  /** Sine, element-wise. */
  async sin(data) {
    return this.unary(data, "sin");
  }
  /** Cosine, element-wise. */
  async cos(data) {
    return this.unary(data, "cos");
  }
  /** Tangent, element-wise. */
  async tan(data) {
    return this.unary(data, "tan");
  }
  /** Negation, element-wise. */
  async negate(data) {
    return this.unary(data, "negate");
  }
  /** Square, element-wise. */
  async square(data) {
    return this.unary(data, "square");
  }
  // =========================================================================
  // Additional Matrix Operations
  // =========================================================================
  /** Matrix-vector product of a rows x cols matrix with `vector`. */
  async matvec(matrix, rows, cols, vector) {
    return toParallelResult(await this.workerPool.matvec(matrix, rows, cols, vector));
  }
  /** Outer product of two vectors. */
  async outer(a, b) {
    return toParallelResult(await this.workerPool.outer(a, b));
  }
  // =========================================================================
  // Search and Sort Operations
  // =========================================================================
  /** Locate an element matching `predicate`. */
  async find(data, predicate) {
    return toParallelResult(await this.workerPool.find(data, predicate));
  }
  /** Sort the data, optionally with a comparator. */
  async sort(data, compare) {
    return toParallelResult(await this.workerPool.sort(data, compare));
  }
  // =========================================================================
  // Convenience Methods
  // =========================================================================
  /** Element-wise addition of two arrays. */
  async add(a, b) {
    return this.elementwise(a, b, "add");
  }
  /** Element-wise subtraction of two arrays. */
  async subtract(a, b) {
    return this.elementwise(a, b, "subtract");
  }
  /** Element-wise multiplication of two arrays. */
  async multiply(a, b) {
    return this.elementwise(a, b, "multiply");
  }
  /** Element-wise division of two arrays. */
  async divide(a, b) {
    return this.elementwise(a, b, "divide");
  }
  /** Mean value; extracted from the combined variance computation. */
  async mean(data) {
    const v = await this.variance(data);
    return { ...v, result: v.result.mean };
  }
  /** Standard deviation; extracted from the combined variance computation. */
  async std(data) {
    const v = await this.variance(data);
    return { ...v, result: v.result.std };
  }
  /** Minimum value; extracted from the combined min/max pass. */
  async min(data) {
    const mm = await this.minMax(data);
    return { ...mm, result: mm.result.min };
  }
  /** Maximum value; extracted from the combined min/max pass. */
  async max(data) {
    const mm = await this.minMax(data);
    return { ...mm, result: mm.result.max };
  }
  /** Shut the pool down; pass true to terminate workers immediately. */
  async terminate(force = false) {
    await this.workerPool.terminate(force);
  }
  /** Merge new options into the config and propagate them to the pool. */
  updateConfig(config) {
    this.config = { ...this.config, ...config };
    this.workerPool.updateConfig(toWorkerConfig(this.config));
  }
  /** Shallow copy of the effective configuration. */
  getConfig() {
    return { ...this.config };
  }
  /** Escape hatch: the raw MathWorkerPool for advanced use. */
  getWorkerPool() {
    return this.workerPool;
  }
};
|
|
372
|
+
// Shared module-level pool instance; used as the default by the parallel*
// helper functions whenever no explicit pool is supplied in their options.
var computePool = new ComputePool();
|
|
373
|
+
|
|
374
|
+
// src/operations/matmul.ts
|
|
375
|
+
/** Matrix product of flat row-major A (aRows x aCols) and B (aCols x bCols). */
async function parallelMatmul(a, aRows, aCols, b, bCols, options = {}) {
  // Each wrapper routes to the caller-provided pool, defaulting to the
  // shared module-level computePool.
  return (options.pool ?? computePool).matmul(a, aRows, aCols, b, bCols);
}
/** Matrix-vector product of a rows x cols matrix with `vector`. */
async function parallelMatvec(matrix, rows, cols, vector, options = {}) {
  return (options.pool ?? computePool).matvec(matrix, rows, cols, vector);
}
/** Transpose of a rows x cols matrix. */
async function parallelTranspose(data, rows, cols, options = {}) {
  return (options.pool ?? computePool).transpose(data, rows, cols);
}
/** Outer product of two vectors. */
async function parallelOuter(a, b, options = {}) {
  return (options.pool ?? computePool).outer(a, b);
}
/** Dot product of two vectors. */
async function parallelDot(a, b, options = {}) {
  return (options.pool ?? computePool).dot(a, b);
}
|
|
395
|
+
|
|
396
|
+
// src/operations/elementwise.ts
|
|
397
|
+
/** Element-wise addition of two arrays. */
async function parallelAdd(a, b, options = {}) {
  // Each wrapper routes to the caller-provided pool, defaulting to the
  // shared module-level computePool.
  return (options.pool ?? computePool).add(a, b);
}
/** Element-wise subtraction of two arrays. */
async function parallelSubtract(a, b, options = {}) {
  return (options.pool ?? computePool).subtract(a, b);
}
/** Element-wise multiplication of two arrays. */
async function parallelMultiply(a, b, options = {}) {
  return (options.pool ?? computePool).multiply(a, b);
}
/** Element-wise division of two arrays. */
async function parallelDivide(a, b, options = {}) {
  return (options.pool ?? computePool).divide(a, b);
}
/** Multiply every element by `scalar`. */
async function parallelScale(data, scalar, options = {}) {
  return (options.pool ?? computePool).scale(data, scalar);
}
/** Absolute value, element-wise. */
async function parallelAbs(data, options = {}) {
  return (options.pool ?? computePool).abs(data);
}
/** Negation, element-wise. */
async function parallelNegate(data, options = {}) {
  return (options.pool ?? computePool).negate(data);
}
/** Square, element-wise. */
async function parallelSquare(data, options = {}) {
  return (options.pool ?? computePool).square(data);
}
/** Square root, element-wise. */
async function parallelSqrt(data, options = {}) {
  return (options.pool ?? computePool).sqrt(data);
}
/** Exponential, element-wise. */
async function parallelExp(data, options = {}) {
  return (options.pool ?? computePool).exp(data);
}
/** Natural logarithm, element-wise. */
async function parallelLog(data, options = {}) {
  return (options.pool ?? computePool).log(data);
}
/** Sine, element-wise. */
async function parallelSin(data, options = {}) {
  return (options.pool ?? computePool).sin(data);
}
/** Cosine, element-wise. */
async function parallelCos(data, options = {}) {
  return (options.pool ?? computePool).cos(data);
}
/** Tangent, element-wise. */
async function parallelTan(data, options = {}) {
  return (options.pool ?? computePool).tan(data);
}
/** Element-wise binary operation `op` over two arrays. */
async function parallelElementwise(a, b, op, options = {}) {
  return (options.pool ?? computePool).elementwise(a, b, op);
}
/** Apply the named unary function `fn` to every element. */
async function parallelUnary(data, fn, options = {}) {
  return (options.pool ?? computePool).unary(data, fn);
}
|
|
461
|
+
|
|
462
|
+
// src/operations/reduce.ts
|
|
463
|
+
/** Sum of all elements. */
async function parallelSum(data, options = {}) {
  // Each wrapper routes to the caller-provided pool, defaulting to the
  // shared module-level computePool.
  return (options.pool ?? computePool).sum(data);
}
/** Mean of all elements. */
async function parallelMean(data, options = {}) {
  return (options.pool ?? computePool).mean(data);
}
/** Minimum value. */
async function parallelMin(data, options = {}) {
  return (options.pool ?? computePool).min(data);
}
/** Maximum value. */
async function parallelMax(data, options = {}) {
  return (options.pool ?? computePool).max(data);
}
/** Minimum and maximum in one pass. */
async function parallelMinMax(data, options = {}) {
  return (options.pool ?? computePool).minMax(data);
}
/** Variance along with mean and standard deviation. */
async function parallelVariance(data, options = {}) {
  return (options.pool ?? computePool).variance(data);
}
/** Standard deviation. */
async function parallelStd(data, options = {}) {
  return (options.pool ?? computePool).std(data);
}
/** Euclidean (L2) norm. */
async function parallelNorm(data, options = {}) {
  return (options.pool ?? computePool).norm(data);
}
/** Euclidean distance between two vectors. */
async function parallelDistance(a, b, options = {}) {
  return (options.pool ?? computePool).distance(a, b);
}
/** Histogram with `bins` buckets over [min, max]. */
async function parallelHistogram(data, bins, min, max, options = {}) {
  return (options.pool ?? computePool).histogram(data, bins, min, max);
}
/** Fold the array with `fn`, starting from `initial`. */
async function parallelReduce(data, fn, initial, options = {}) {
  return (options.pool ?? computePool).reduce(data, fn, initial);
}
|
|
507
|
+
|
|
508
|
+
// src/operations/map.ts
|
|
509
|
+
/** Apply `fn` to every element. */
async function parallelMap(data, fn, options = {}) {
  // Each wrapper routes to the caller-provided pool, defaulting to the
  // shared module-level computePool.
  return (options.pool ?? computePool).map(data, fn);
}
/** Keep only elements matching `predicate`. */
async function parallelFilter(data, predicate, options = {}) {
  return (options.pool ?? computePool).filter(data, predicate);
}
/** Locate an element matching `predicate`. */
async function parallelFind(data, predicate, options = {}) {
  return (options.pool ?? computePool).find(data, predicate);
}
/** Sort the data, optionally with a comparator. */
async function parallelSort(data, compare, options = {}) {
  return (options.pool ?? computePool).sort(data, compare);
}
/**
 * Run `fn` for its side effects on every element, implemented on top of map
 * with the mapped values discarded.
 * NOTE(review): when the pool parallelizes, fn executes inside workers, so
 * side effects on main-thread state will not be observed — confirm intent.
 */
async function parallelForEach(data, fn, options = {}) {
  const pool = options.pool ?? computePool;
  const mapped = await pool.map(data, (item) => {
    fn(item);
    return undefined;
  });
  return { ...mapped, result: mapped.result };
}
/** True when at least one element matches `predicate` (via find). */
async function parallelSome(data, predicate, options = {}) {
  const pool = options.pool ?? computePool;
  const outcome = await pool.find(data, predicate);
  return { ...outcome, result: outcome.result.found };
}
/** True when every element matches `predicate` (find on the negation). */
async function parallelEvery(data, predicate, options = {}) {
  const pool = options.pool ?? computePool;
  const outcome = await pool.find(data, (item) => !predicate(item));
  return { ...outcome, result: !outcome.result.found };
}
/** Count elements matching `predicate`; with no predicate, count them all. */
async function parallelCount(data, predicate, options = {}) {
  const pool = options.pool ?? computePool;
  if (!predicate) {
    // Every element counts: answer immediately without touching the pool.
    return { result: data.length, duration: 0, chunks: 1, parallelized: false };
  }
  const filtered = await pool.filter(data, predicate);
  return { ...filtered, result: filtered.result.length };
}
|
|
568
|
+
|
|
569
|
+
// src/strategies/chunk.ts
|
|
570
|
+
// Defaults for the chunking strategy helpers below.
var DEFAULT_CHUNK_OPTIONS = {
  minChunkSize: 1000, // never create chunks smaller than this
  // Cap chunk count at the host's core count (4 when unknown).
  maxChunks: typeof navigator !== "undefined" ? navigator.hardwareConcurrency || 4 : 4,
  targetChunkSize: 0, // 0 = derive chunk size automatically
  balanced: true      // spread elements evenly across chunks
};
|
|
577
|
+
/**
 * Decide how many chunks to split `totalElements` into.
 * Returns 0 for empty input; honors an explicit targetChunkSize when given;
 * otherwise fits as many minChunkSize-or-larger chunks as maxChunks allows.
 */
function calculateOptimalChunks(totalElements, options = {}) {
  const merged = { ...DEFAULT_CHUNK_OPTIONS, ...options };
  if (totalElements === 0) {
    return 0;
  }
  if (merged.targetChunkSize > 0) {
    // Explicit chunk size requested: derive the count from it, capped.
    return Math.min(merged.maxChunks, Math.ceil(totalElements / merged.targetChunkSize));
  }
  // Cap by how many minimum-size chunks the data can actually fill.
  const byMinSize = Math.floor(totalElements / merged.minChunkSize);
  return Math.max(1, Math.min(merged.maxChunks, byMinSize));
}
|
|
589
|
+
/**
 * Split a Float64Array into zero-copy views (subarray) for parallel work.
 * Returns { chunks, chunkInfo, totalElements, numChunks }; chunkInfo records
 * each view's [startIndex, endIndex) range and position.
 */
function chunkFloat64Array(data, options = {}) {
  const { minChunkSize, balanced } = { ...DEFAULT_CHUNK_OPTIONS, ...options };
  const total = data.length;
  if (total === 0) {
    return { chunks: [], chunkInfo: [], totalElements: 0, numChunks: 0 };
  }
  const wanted = calculateOptimalChunks(total, options);
  if (wanted <= 1) {
    // Too small to split: hand back the original array as the only chunk.
    return {
      chunks: [data],
      chunkInfo: [{ startIndex: 0, endIndex: total, length: total, chunkIndex: 0 }],
      totalElements: total,
      numChunks: 1
    };
  }
  const chunks = [];
  const chunkInfo = [];
  const pushView = (startIndex, endIndex, chunkIndex) => {
    chunks.push(data.subarray(startIndex, endIndex));
    chunkInfo.push({ startIndex, endIndex, length: endIndex - startIndex, chunkIndex });
  };
  if (balanced) {
    // Even split: the first (total % wanted) chunks carry one extra element.
    const base = Math.floor(total / wanted);
    const extra = total % wanted;
    let cursor = 0;
    for (let i = 0; i < wanted; i++) {
      const next = cursor + base + (i < extra ? 1 : 0);
      pushView(cursor, next, i);
      cursor = next;
    }
  } else {
    // Fixed-size split: uniform chunks (at least minChunkSize) plus a
    // possibly shorter tail.
    const size = Math.max(minChunkSize, Math.ceil(total / wanted));
    for (let cursor = 0, i = 0; cursor < total; i++) {
      const next = Math.min(cursor + size, total);
      pushView(cursor, next, i);
      cursor = next;
    }
  }
  return { chunks, chunkInfo, totalElements: total, numChunks: chunks.length };
}
|
|
659
|
+
/**
 * Split a plain array into copied chunks (slice) for parallel work.
 * Returns { chunks, chunkInfo, totalElements, numChunks }; chunkInfo records
 * each chunk's [startIndex, endIndex) range and position.
 */
function chunkArray(data, options = {}) {
  const { balanced } = { ...DEFAULT_CHUNK_OPTIONS, ...options };
  const total = data.length;
  if (total === 0) {
    return { chunks: [], chunkInfo: [], totalElements: 0, numChunks: 0 };
  }
  const wanted = calculateOptimalChunks(total, options);
  if (wanted <= 1) {
    // Too small to split: hand back the original array as the only chunk.
    return {
      chunks: [data],
      chunkInfo: [{ startIndex: 0, endIndex: total, length: total, chunkIndex: 0 }],
      totalElements: total,
      numChunks: 1
    };
  }
  const chunks = [];
  const chunkInfo = [];
  const pushCopy = (startIndex, endIndex, chunkIndex) => {
    chunks.push(data.slice(startIndex, endIndex));
    chunkInfo.push({ startIndex, endIndex, length: endIndex - startIndex, chunkIndex });
  };
  if (balanced) {
    // Even split: the first (total % wanted) chunks carry one extra element.
    const base = Math.floor(total / wanted);
    const extra = total % wanted;
    let cursor = 0;
    for (let i = 0; i < wanted; i++) {
      const next = cursor + base + (i < extra ? 1 : 0);
      pushCopy(cursor, next, i);
      cursor = next;
    }
  } else {
    // Fixed-size split: uniform chunks plus a possibly shorter tail.
    const size = Math.ceil(total / wanted);
    for (let cursor = 0, i = 0; cursor < total; i++) {
      const next = Math.min(cursor + size, total);
      pushCopy(cursor, next, i);
      cursor = next;
    }
  }
  return { chunks, chunkInfo, totalElements: total, numChunks: chunks.length };
}
|
|
729
|
+
/**
 * Concatenate Float64Array chunks back into one array.
 * A single chunk is returned as-is (no copy); otherwise a new array is
 * filled in chunk order.
 */
function mergeFloat64Chunks(chunks) {
  if (chunks.length === 0) {
    return new Float64Array(0);
  }
  if (chunks.length === 1) {
    return chunks[0];
  }
  let total = 0;
  for (const piece of chunks) {
    total += piece.length;
  }
  const merged = new Float64Array(total);
  let cursor = 0;
  for (const piece of chunks) {
    merged.set(piece, cursor);
    cursor += piece.length;
  }
  return merged;
}
|
|
741
|
+
/**
 * Concatenate plain-array chunks back into one array.
 * A single chunk is returned as-is (no copy); otherwise one level of
 * flattening joins the chunks in order.
 */
function mergeArrayChunks(chunks) {
  switch (chunks.length) {
    case 0:
      return [];
    case 1:
      return chunks[0];
    default:
      return chunks.flat();
  }
}
|
|
746
|
+
/**
 * Simple size gate: true when `elementCount` reaches `threshold`
 * (default 10,000 elements).
 */
function shouldParallelize(elementCount, threshold = 10000) {
  return elementCount >= threshold;
}
|
|
749
|
+
/**
 * Split the half-open index range [start, end) into balanced contiguous
 * [from, to) pairs; the first (size % pieces) partitions get one extra index.
 */
function partitionRange(start, end, options = {}) {
  const span = end - start;
  const pieces = calculateOptimalChunks(span, options);
  if (pieces <= 1) {
    return [[start, end]];
  }
  const base = Math.floor(span / pieces);
  const extra = span % pieces;
  const partitions = [];
  let cursor = start;
  for (let i = 0; i < pieces; i++) {
    const next = cursor + base + (i < extra ? 1 : 0);
    partitions.push([cursor, next]);
    cursor = next;
  }
  return partitions;
}
|
|
767
|
+
/**
 * Partition a rows x cols grid into row bands: each entry is
 * [rowStart, rowEnd, 0, cols] — the split happens along rows only and every
 * band spans all columns.
 */
function partition2D(rows, cols, options = {}) {
  return partitionRange(0, rows, options).map(
    ([rowStart, rowEnd]) => [rowStart, rowEnd, 0, cols]
  );
}
|
|
771
|
+
|
|
772
|
+
// src/strategies/threshold.ts
|
|
773
|
+
// Per-category element-count thresholds above which parallel execution is
// considered worthwhile.
var DEFAULT_THRESHOLDS = {
  matmul: 10000,       // ~100x100 matrix
  elementwise: 50000,  // ~50K elements
  reduce: 100000,      // ~100K elements
  map: 10000,          // ~10K elements
  sort: 5000,          // ~5K elements
  decomposition: 2500, // ~50x50 matrix
  general: 50000       // default ~50K
};
|
|
789
|
+
// Decides, per operation category, whether a workload should run
// sequentially or on the worker pool, based on configurable element-count
// thresholds and the pool's live state.
var ThresholdDispatcher = class {
  thresholds;
  pool;
  /**
   * @param thresholds - partial overrides merged over DEFAULT_THRESHOLDS
   * @param pool - pool to consult; defaults to the shared computePool
   */
  constructor(thresholds = {}, pool) {
    this.thresholds = { ...DEFAULT_THRESHOLDS, ...thresholds };
    this.pool = pool ?? computePool;
  }
  /** Threshold for a single category. */
  getThreshold(category) {
    return this.thresholds[category];
  }
  /** Merge new threshold values over the current table. */
  setThresholds(thresholds) {
    this.thresholds = { ...this.thresholds, ...thresholds };
  }
  /** Shallow copy of the full threshold table. */
  getThresholds() {
    return { ...this.thresholds };
  }
  /**
   * Pick an execution mode for `elementCount` elements of `category` work.
   * Sequential when the pool is not ready, parallelism is disabled, or the
   * workload is under the category threshold; parallel otherwise. Returns
   * { mode, reason, threshold, elementCount }.
   */
  dispatch(elementCount, category = "general") {
    const threshold = this.thresholds[category];
    const verdict = (mode, reason) => ({ mode, reason, threshold, elementCount });
    if (!this.pool.isReady()) {
      return verdict("sequential", "Worker pool not initialized");
    }
    if (!this.pool.getConfig().enabled) {
      return verdict("sequential", "Parallel processing disabled");
    }
    if (elementCount < threshold) {
      return verdict(
        "sequential",
        `Element count (${elementCount}) below threshold (${threshold})`
      );
    }
    return verdict(
      "parallel",
      `Element count (${elementCount}) exceeds threshold (${threshold})`
    );
  }
  /** True when dispatch() would pick parallel mode. */
  shouldParallelize(elementCount, category = "general") {
    return this.dispatch(elementCount, category).mode === "parallel";
  }
  /**
   * Suggest a chunk count for the workload: 1 in sequential mode, otherwise
   * scaled from the pool's live worker count per category.
   */
  calculateChunks(elementCount, category = "general") {
    if (this.dispatch(elementCount, category).mode === "sequential") {
      return 1;
    }
    const workers = Math.max(1, this.pool.stats().totalWorkers);
    switch (category) {
      case "matmul":
      case "decomposition":
        return workers;
      case "elementwise":
      case "map":
        return Math.min(workers * 2, Math.ceil(elementCount / 10000));
      case "reduce":
        return Math.min(workers, Math.ceil(elementCount / 50000));
      case "sort":
        return Math.min(workers, 8);
      default:
        return workers;
    }
  }
};
|
|
884
|
+
// Shared module-level dispatcher backing the convenience functions below;
// it uses DEFAULT_THRESHOLDS and the shared computePool.
var thresholdDispatcher = new ThresholdDispatcher();
// Convenience wrapper: shouldParallelize on the shared dispatcher.
// (Exported as `shouldParallelize`; the "2" suffix is a bundler rename to
// avoid clashing with the chunk-strategy helper of the same name.)
function shouldParallelize2(elementCount, category = "general") {
  return thresholdDispatcher.shouldParallelize(elementCount, category);
}
// Convenience wrapper: dispatch on the shared dispatcher.
function dispatch(elementCount, category = "general") {
  return thresholdDispatcher.dispatch(elementCount, category);
}
// Convenience wrapper: calculateChunks on the shared dispatcher.
function calculateChunks(elementCount, category = "general") {
  return thresholdDispatcher.calculateChunks(elementCount, category);
}
|
|
894
|
+
export {
|
|
895
|
+
ComputePool,
|
|
896
|
+
DEFAULT_POOL_CONFIG,
|
|
897
|
+
DEFAULT_THRESHOLDS,
|
|
898
|
+
ThresholdDispatcher,
|
|
899
|
+
Transfer,
|
|
900
|
+
calculateChunks,
|
|
901
|
+
calculateOptimalChunks,
|
|
902
|
+
chunkArray,
|
|
903
|
+
chunkFloat64Array,
|
|
904
|
+
computePool,
|
|
905
|
+
dispatch,
|
|
906
|
+
mergeArrayChunks,
|
|
907
|
+
mergeFloat64Chunks,
|
|
908
|
+
parallelAbs,
|
|
909
|
+
parallelAdd,
|
|
910
|
+
parallelCos,
|
|
911
|
+
parallelCount,
|
|
912
|
+
parallelDistance,
|
|
913
|
+
parallelDivide,
|
|
914
|
+
parallelDot,
|
|
915
|
+
parallelElementwise,
|
|
916
|
+
parallelEvery,
|
|
917
|
+
parallelExp,
|
|
918
|
+
parallelFilter,
|
|
919
|
+
parallelFind,
|
|
920
|
+
parallelForEach,
|
|
921
|
+
parallelHistogram,
|
|
922
|
+
parallelLog,
|
|
923
|
+
parallelMap,
|
|
924
|
+
parallelMatmul,
|
|
925
|
+
parallelMatvec,
|
|
926
|
+
parallelMax,
|
|
927
|
+
parallelMean,
|
|
928
|
+
parallelMin,
|
|
929
|
+
parallelMinMax,
|
|
930
|
+
parallelMultiply,
|
|
931
|
+
parallelNegate,
|
|
932
|
+
parallelNorm,
|
|
933
|
+
parallelOuter,
|
|
934
|
+
parallelReduce,
|
|
935
|
+
parallelScale,
|
|
936
|
+
parallelSin,
|
|
937
|
+
parallelSome,
|
|
938
|
+
parallelSort,
|
|
939
|
+
parallelSqrt,
|
|
940
|
+
parallelSquare,
|
|
941
|
+
parallelStd,
|
|
942
|
+
parallelSubtract,
|
|
943
|
+
parallelSum,
|
|
944
|
+
parallelTan,
|
|
945
|
+
parallelTranspose,
|
|
946
|
+
parallelUnary,
|
|
947
|
+
parallelVariance,
|
|
948
|
+
partition2D,
|
|
949
|
+
partitionRange,
|
|
950
|
+
shouldParallelize as shouldChunkParallelize,
|
|
951
|
+
shouldParallelize2 as shouldParallelize,
|
|
952
|
+
thresholdDispatcher
|
|
953
|
+
};
|