lulz 1.0.2 → 2.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +242 -140
- package/TODO.md +132 -0
- package/examples.js +169 -197
- package/index.js +164 -14
- package/package.json +16 -17
- package/src/flow.js +362 -215
- package/src/red-lib.js +595 -0
- package/src/rx-lib.js +679 -0
- package/src/utils.js +270 -0
- package/src/workers.js +367 -0
- package/test.js +505 -279
- package/src/nodes.js +0 -520
package/src/utils.js
ADDED
|
@@ -0,0 +1,270 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* lulz - Utilities
|
|
3
|
+
*
|
|
4
|
+
* Helper functions and utilities.
|
|
5
|
+
*/
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
// ─────────────────────────────────────────────────────────────
|
|
9
|
+
// Packet Helpers
|
|
10
|
+
// ─────────────────────────────────────────────────────────────
|
|
11
|
+
|
|
12
|
+
/**
|
|
13
|
+
* Create a properly formatted packet
|
|
14
|
+
*/
|
|
15
|
+
export const packet = (payload, meta = {}) => ({
|
|
16
|
+
payload,
|
|
17
|
+
topic: meta.topic ?? '',
|
|
18
|
+
_id: meta._id ?? crypto.randomUUID?.() ?? Math.random().toString(36).slice(2),
|
|
19
|
+
_ts: Date.now(),
|
|
20
|
+
...meta,
|
|
21
|
+
});
|
|
22
|
+
|
|
23
|
+
/**
 * Clone a packet (deep copy).
 *
 * Uses a JSON round-trip: drops undefined/function values and stringifies
 * Dates — same contract as the original.
 */
export const clonePacket = (pkt) => {
  const serialized = JSON.stringify(pkt);
  return JSON.parse(serialized);
};
|
|
28
|
+
|
|
29
|
+
/**
 * Merge packets left to right.
 *
 * Later packets override earlier fields, except `payload`, which only
 * overrides when it is non-nullish.
 */
export const mergePackets = (...packets) => {
  let merged = {};
  for (const pkt of packets) {
    merged = {
      ...merged,
      ...pkt,
      payload: pkt.payload ?? merged.payload,
    };
  }
  return merged;
};
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
// ─────────────────────────────────────────────────────────────
|
|
41
|
+
// Function Composition Helpers
|
|
42
|
+
// ─────────────────────────────────────────────────────────────
|
|
43
|
+
|
|
44
|
+
/**
 * Pipe functions left to right: pipe(f, g)(x) === g(f(x)).
 */
export const pipe = (...fns) => (input) => {
  let value = input;
  for (const fn of fns) {
    value = fn(value);
  }
  return value;
};
|
|
49
|
+
|
|
50
|
+
/**
 * Compose functions right to left: compose(f, g)(x) === f(g(x)).
 */
export const compose = (...fns) => (input) => {
  let value = input;
  for (let i = fns.length - 1; i >= 0; i--) {
    value = fns[i](value);
  }
  return value;
};
|
|
55
|
+
|
|
56
|
+
/**
 * Identity function — returns its argument unchanged.
 */
export const identity = (value) => {
  return value;
};
|
|
60
|
+
|
|
61
|
+
/**
 * Constant function — returns a thunk that always yields `value`.
 */
export const constant = (value) => {
  return () => value;
};
|
|
65
|
+
|
|
66
|
+
/**
 * Noop function — does nothing and returns undefined.
 */
export const noop = () => undefined;
|
|
70
|
+
|
|
71
|
+
|
|
72
|
+
// ─────────────────────────────────────────────────────────────
|
|
73
|
+
// Async Helpers
|
|
74
|
+
// ─────────────────────────────────────────────────────────────
|
|
75
|
+
|
|
76
|
+
/**
 * Sleep for `ms` milliseconds.
 *
 * @param {number} ms - Delay in milliseconds
 * @returns {Promise<void>}
 */
export const sleep = (ms) =>
  new Promise((resolve) => {
    setTimeout(resolve, ms);
  });
|
|
81
|
+
|
|
82
|
+
/**
 * Race a promise against a timeout.
 *
 * Fix: the original never cleared the timer, so the pending setTimeout kept
 * the event loop alive for the full `ms` even after `promise` settled.
 *
 * @param {Promise} promise - Promise to guard
 * @param {number} ms - Timeout in milliseconds
 * @param {string} [message='Timeout'] - Rejection message on timeout
 * @returns {Promise} Settles like `promise`, or rejects with Error(message)
 */
export const withTimeout = (promise, ms, message = 'Timeout') => {
  let timer;
  const timeout = new Promise((_, reject) => {
    timer = setTimeout(() => reject(new Error(message)), ms);
  });
  // Always clear the timer, whichever side of the race settles first.
  return Promise.race([promise, timeout]).finally(() => clearTimeout(timer));
};
|
|
92
|
+
|
|
93
|
+
/**
 * Retry an async function with exponential backoff.
 *
 * Makes `retries + 1` attempts total; waits `delay * backoff^attempt` ms
 * after each failed attempt except the last, then rethrows the last error.
 *
 * @param {Function} fn - Function to invoke (may return a promise)
 * @param {Object} [opts]
 * @param {number} [opts.retries=3] - Number of retries after the first attempt
 * @param {number} [opts.delay=1000] - Base delay in ms
 * @param {number} [opts.backoff=2] - Exponential backoff multiplier
 */
export const withRetry = async (fn, { retries = 3, delay = 1000, backoff = 2 } = {}) => {
  let attempt = 0;
  for (;;) {
    try {
      return await fn();
    } catch (err) {
      if (attempt >= retries) {
        throw err;
      }
      // Same schedule as sleep(delay * backoff^attempt), inlined.
      await new Promise((resolve) =>
        setTimeout(resolve, delay * backoff ** attempt)
      );
      attempt++;
    }
  }
};
|
|
112
|
+
|
|
113
|
+
|
|
114
|
+
// ─────────────────────────────────────────────────────────────
|
|
115
|
+
// Collection Helpers
|
|
116
|
+
// ─────────────────────────────────────────────────────────────
|
|
117
|
+
|
|
118
|
+
/**
 * Chunk an array into groups of `size`.
 *
 * Fix: the original looped forever when `size <= 0` (the loop index never
 * advanced); now validates `size` up front.
 *
 * @param {Array} arr - Source array
 * @param {number} size - Chunk length; must be a positive integer
 * @returns {Array[]} Array of chunks; last chunk may be shorter
 * @throws {RangeError} If size is not a positive integer
 */
export const chunk = (arr, size) => {
  if (!Number.isInteger(size) || size <= 0) {
    throw new RangeError(`chunk size must be a positive integer, got ${size}`);
  }
  const chunks = [];
  for (let i = 0; i < arr.length; i += size) {
    chunks.push(arr.slice(i, i + size));
  }
  return chunks;
};
|
|
128
|
+
|
|
129
|
+
/**
 * Flatten nested arrays to the given depth (default 1).
 */
export const flatten = (arr, depth = 1) => {
  return arr.flat(depth);
};
|
|
134
|
+
|
|
135
|
+
/**
 * Unique values, keeping the first occurrence of each key.
 *
 * @param {Array} arr - Source array
 * @param {Function} [keyFn] - Maps each item to its dedupe key (default: identity)
 */
export const unique = (arr, keyFn = (x) => x) => {
  const picked = [];
  const seen = new Set();
  for (const element of arr) {
    const key = keyFn(element);
    if (!seen.has(key)) {
      seen.add(key);
      picked.push(element);
    }
  }
  return picked;
};
|
|
147
|
+
|
|
148
|
+
/**
 * Group items by key.
 *
 * @param {Array} arr - Source array
 * @param {Function} keyFn - Maps each item to its group key
 * @returns {Object} key → array of items
 */
export const groupBy = (arr, keyFn) => {
  const groups = {};
  for (const item of arr) {
    const key = keyFn(item);
    (groups[key] ??= []).push(item);
  }
  return groups;
};
|
|
159
|
+
|
|
160
|
+
/**
 * Partition items by predicate.
 *
 * @returns {[Array, Array]} [items matching predicate, items not matching]
 */
export const partition = (arr, predicate) => {
  const matched = [];
  const rest = [];
  for (const item of arr) {
    if (predicate(item)) {
      matched.push(item);
    } else {
      rest.push(item);
    }
  }
  return [matched, rest];
};
|
|
173
|
+
|
|
174
|
+
|
|
175
|
+
// ─────────────────────────────────────────────────────────────
|
|
176
|
+
// Object Helpers
|
|
177
|
+
// ─────────────────────────────────────────────────────────────
|
|
178
|
+
|
|
179
|
+
/**
 * Deep clone via JSON round-trip.
 *
 * Same contract as the original: drops undefined/function values and
 * stringifies Dates; throws on cyclic structures.
 */
export const deepClone = (obj) => {
  const serialized = JSON.stringify(obj);
  return JSON.parse(serialized);
};
|
|
184
|
+
|
|
185
|
+
/**
 * Deep merge sources into target (mutates and returns target).
 *
 * Fixes over the original:
 * - skips `__proto__` / `constructor` / `prototype` keys, closing a
 *   prototype-pollution hole when merging untrusted input;
 * - iterates own keys only (Object.keys) instead of for...in;
 * - replaces a non-object target value with {} before merging a nested
 *   object into it, instead of silently dropping the source subtree.
 *
 * @param {Object} target - Destination (mutated in place)
 * @param {...Object} sources - Objects merged left to right; later wins
 * @returns {Object} target
 */
export const deepMerge = (target, ...sources) => {
  // Local check so the merge is self-contained: plain, non-array object.
  const isPlain = (v) => v && typeof v === 'object' && !Array.isArray(v);

  if (!sources.length) return target;
  const source = sources.shift();

  if (isPlain(target) && isPlain(source)) {
    for (const key of Object.keys(source)) {
      // Prototype-pollution guard for untrusted input.
      if (key === '__proto__' || key === 'constructor' || key === 'prototype') {
        continue;
      }
      if (isPlain(source[key])) {
        if (!isPlain(target[key])) {
          target[key] = {};
        }
        deepMerge(target[key], source[key]);
      } else {
        target[key] = source[key];
      }
    }
  }

  return deepMerge(target, ...sources);
};
|
|
205
|
+
|
|
206
|
+
// True for plain (non-array, non-null) objects. Note: for a falsy `item`
// this returns the value itself (null/undefined/0/''), not `false` —
// callers in this file only use it in boolean context, so that is fine.
const isObject = (item) =>
  item && typeof item === 'object' && !Array.isArray(item);
|
|
208
|
+
|
|
209
|
+
/**
 * Pick keys from an object.
 *
 * Uses the `in` operator, so inherited (prototype) properties are picked
 * too — same behavior as the original.
 */
export const pick = (obj, keys) => {
  const subset = {};
  for (const key of keys) {
    if (key in obj) {
      subset[key] = obj[key];
    }
  }
  return subset;
};
|
|
217
|
+
|
|
218
|
+
/**
 * Omit keys from an object (shallow copy, original untouched).
 */
export const omit = (obj, keys) => {
  const remaining = { ...obj };
  keys.forEach((key) => {
    delete remaining[key];
  });
  return remaining;
};
|
|
228
|
+
|
|
229
|
+
|
|
230
|
+
// ─────────────────────────────────────────────────────────────
|
|
231
|
+
// Debug Helpers
|
|
232
|
+
// ─────────────────────────────────────────────────────────────
|
|
233
|
+
|
|
234
|
+
/**
 * Create a namespaced debug logger.
 *
 * The returned function logs via console.log when enabled; it also carries
 * .error/.warn/.info/.debug methods. error and warn are deliberately
 * always emitted, even when the logger is disabled; info and debug are
 * gated on `enabled` (and return the short-circuit value, as before).
 *
 * @param {string} namespace - Prefix shown in brackets
 * @param {boolean} [enabled=true] - Gates log/info/debug output
 */
export const createLogger = (namespace, enabled = true) => {
  function log(...args) {
    if (!enabled) return;
    console.log(`[${namespace}]`, ...args);
  }

  log.error = (...args) => console.error(`[${namespace}:error]`, ...args);
  log.warn = (...args) => console.warn(`[${namespace}:warn]`, ...args);
  log.info = (...args) => enabled && console.info(`[${namespace}:info]`, ...args);
  log.debug = (...args) => enabled && console.debug(`[${namespace}:debug]`, ...args);

  return log;
};
|
|
251
|
+
|
|
252
|
+
/**
 * Measure and log execution time of an (a)sync function.
 *
 * Logs only on success — if `fn` throws/rejects, the error propagates
 * without logging, matching the original behavior.
 *
 * @param {string} label - Label for the log line
 * @param {Function} fn - Function to time
 * @returns {Promise<*>} fn's result
 */
export const measure = async (label, fn) => {
  const startedAt = performance.now();
  const value = await fn();
  const elapsed = performance.now() - startedAt;
  console.log(`[${label}] ${elapsed.toFixed(2)}ms`);
  return value;
};
|
|
262
|
+
|
|
263
|
+
/**
 * Assert a condition, throwing Error(message) when it is falsy.
 */
export const assert = (condition, message = 'Assertion failed') => {
  if (condition) return;
  throw new Error(message);
};
|
package/src/workers.js
ADDED
|
@@ -0,0 +1,367 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* lulz - Worker Task Queue
|
|
3
|
+
*
|
|
4
|
+
* EventEmitter-wrapped task queue using:
|
|
5
|
+
* - Worker Threads (Node.js)
|
|
6
|
+
* - Web Workers (browsers)
|
|
7
|
+
*
|
|
8
|
+
* Default worker count: os.cpus().length
|
|
9
|
+
*/
|
|
10
|
+
|
|
11
|
+
import { EventEmitter } from 'events';
|
|
12
|
+
import { cpus } from 'os';
|
|
13
|
+
import { Worker, isMainThread, parentPort, workerData } from 'worker_threads';
|
|
14
|
+
import { fileURLToPath } from 'url';
|
|
15
|
+
import { dirname, join } from 'path';
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
// ─────────────────────────────────────────────────────────────
|
|
19
|
+
// Task Queue
|
|
20
|
+
// ─────────────────────────────────────────────────────────────
|
|
21
|
+
|
|
22
|
+
/**
 * Create a task queue with worker pool
 *
 * @param {Object} options
 * @param {number} options.workers - Number of workers (default: CPU count)
 * @param {string} options.workerScript - Path to worker script
 * @param {Function} options.handler - Task handler for inline workers
 *
 * @returns {EventEmitter} Queue with submit/on API
 *
 * Events:
 * - 'result' - Task completed { id, result }
 * - 'error' - Task failed { id, error }
 * - 'drain' - All tasks completed
 * - 'idle' - Workers are idle
 *
 * NOTE(review): 'error' is EventEmitter's special event — if no 'error'
 * listener is attached, an emitted task failure throws. Callers should
 * always subscribe to 'error'.
 *
 * Usage:
 * const queue = taskQueue({ workers: 4, handler: (data) => data * 2 });
 * queue.on('result', ({ id, result }) => console.log(result));
 * queue.submit({ id: 1, data: 42 });
 */
export function taskQueue(options = {}) {
  const {
    workers: workerCount = cpus().length,
    workerScript = null,
    handler = null,
  } = options;

  const queue = new EventEmitter();
  const pending = []; // Tasks waiting for a worker
  const running = new Map(); // taskId → worker
  const pool = []; // Available workers
  let taskIdCounter = 0; // auto-id for tasks submitted without an explicit id
  let totalSubmitted = 0;
  let totalCompleted = 0;

  // ─── Worker Management ───

  // Spawn one worker thread, wire its message/error handling, and return it.
  const createWorker = () => {
    let worker;

    if (workerScript) {
      // External script
      worker = new Worker(workerScript);
    } else if (handler) {
      // Inline handler - create worker with eval
      // NOTE(review): handler.toString() serializes only the function's
      // source — any closed-over variables or module imports it relies on
      // will be undefined inside the worker. Only self-contained handlers
      // work here.
      const handlerStr = handler.toString();
      const code = `
        const { parentPort } = require('worker_threads');
        const handler = ${handlerStr};
        parentPort.on('message', async (task) => {
          try {
            const result = await handler(task.data);
            parentPort.postMessage({ id: task.id, result });
          } catch (error) {
            parentPort.postMessage({ id: task.id, error: error.message });
          }
        });
      `;
      worker = new Worker(code, { eval: true });
    } else {
      throw new Error('taskQueue requires either workerScript or handler');
    }

    // Ad-hoc busy flag stored on the worker instance itself.
    worker._busy = false;

    // A message is always one finished task: { id, result } or { id, error }.
    worker.on('message', (msg) => {
      worker._busy = false;
      running.delete(msg.id);
      totalCompleted++;

      if (msg.error) {
        queue.emit('error', { id: msg.id, error: msg.error });
      } else {
        queue.emit('result', { id: msg.id, result: msg.result });
      }

      // Process next task
      if (pending.length > 0) {
        processNext(worker);
      } else {
        pool.push(worker);

        // 'drain' fires when every submitted task has completed.
        if (totalCompleted === totalSubmitted) {
          queue.emit('drain');
        }

        // 'idle' fires when every worker is back in the pool.
        if (pool.length === workerCount) {
          queue.emit('idle');
        }
      }
    });

    worker.on('error', (err) => {
      console.error('[taskQueue] Worker error:', err);
      // Replace crashed worker
      // NOTE(review): if the crashed worker had a task in flight, that task
      // is lost — its entry stays in `running`, totalCompleted never catches
      // up, and 'drain' will not fire. Consider re-queuing the task and
      // cleaning up `running` here.
      const idx = pool.indexOf(worker);
      if (idx > -1) pool.splice(idx, 1);
      pool.push(createWorker());
    });

    return worker;
  };

  // Initialize worker pool
  for (let i = 0; i < workerCount; i++) {
    pool.push(createWorker());
  }

  // ─── Task Processing ───

  // Hand the next pending task to the given (free) worker.
  const processNext = (worker) => {
    if (pending.length === 0) return;

    const task = pending.shift();
    worker._busy = true;
    running.set(task.id, worker);
    worker.postMessage(task);
  };

  // Drain pending tasks onto any workers currently sitting in the pool.
  const tryProcess = () => {
    while (pending.length > 0 && pool.length > 0) {
      const worker = pool.pop();
      processNext(worker);
    }
  };

  // ─── API ───

  /**
   * Submit a task to the queue
   * @param {Object} task - Task object with data property
   * @returns {number} Task ID
   */
  queue.submit = (task) => {
    // Caller-supplied id wins; otherwise auto-increment. NOTE(review):
    // duplicate caller-supplied ids will collide in the `running` map.
    const id = task.id ?? ++taskIdCounter;
    // If the task has no `data` field, the whole task object is the data.
    const taskObj = { id, data: task.data ?? task };

    totalSubmitted++;
    pending.push(taskObj);
    tryProcess();

    return id;
  };

  /**
   * Submit multiple tasks
   * @param {Array} tasks - Array of tasks
   * @returns {number[]} Task IDs
   */
  queue.submitAll = (tasks) => {
    return tasks.map(t => queue.submit(t));
  };

  /**
   * Get queue statistics
   */
  queue.stats = () => ({
    pending: pending.length,
    running: running.size,
    available: pool.length,
    totalSubmitted,
    totalCompleted,
  });

  /**
   * Terminate all workers
   * Kills idle and busy workers alike; in-flight tasks are abandoned and
   * pending tasks are discarded. Emits 'terminated' when done.
   */
  queue.terminate = async () => {
    const terminatePromises = [];

    for (const worker of pool) {
      terminatePromises.push(worker.terminate());
    }

    for (const worker of running.values()) {
      terminatePromises.push(worker.terminate());
    }

    await Promise.all(terminatePromises);
    pool.length = 0;
    running.clear();
    pending.length = 0;

    queue.emit('terminated');
  };

  /**
   * Wait for all tasks to complete
   * Resolves immediately when the queue is already drained; otherwise waits
   * for the next 'drain' event.
   */
  queue.drain = () => {
    return new Promise((resolve) => {
      if (totalCompleted === totalSubmitted && pending.length === 0) {
        resolve();
      } else {
        queue.once('drain', resolve);
      }
    });
  };

  return queue;
}
|
|
224
|
+
|
|
225
|
+
|
|
226
|
+
// ─────────────────────────────────────────────────────────────
|
|
227
|
+
// Worker Node for Flow Integration
|
|
228
|
+
// ─────────────────────────────────────────────────────────────
|
|
229
|
+
|
|
230
|
+
/**
 * Worker node - Process packets in worker threads
 *
 * @param {Object} options
 * @param {number} options.workers - Worker count
 * @param {Function} options.handler - Processing function
 *
 * Usage in flow:
 * ['input', worker({ workers: 4, handler: (data) => heavyComputation(data) }), 'output']
 *
 * NOTE(review): the queue's 'result'/'error' listeners are registered on the
 * FIRST invocation and close over that invocation's `send`. If the flow ever
 * calls this node with a different `send`, later results would still be
 * delivered through the first one — confirm the flow engine passes a stable
 * `send` per node.
 *
 * NOTE(review): the lazily-created pool is never terminated; callers have no
 * handle to shut the workers down.
 */
export function worker(options = {}) {
  const {
    workers: workerCount = cpus().length,
    handler = (data) => data,
  } = options;

  // Create dedicated queue for this node
  let queue = null;
  // taskId → original packet, so the result can be re-attached to it.
  const pendingCallbacks = new Map();

  return (send, packet) => {
    // Lazy initialization
    if (!queue) {
      queue = taskQueue({ workers: workerCount, handler });

      queue.on('result', ({ id, result }) => {
        const original = pendingCallbacks.get(id);
        pendingCallbacks.delete(id);

        if (original) {
          // Forward the original packet with the computed payload.
          send({ ...original, payload: result });
        }
      });

      queue.on('error', ({ id, error }) => {
        const original = pendingCallbacks.get(id);
        pendingCallbacks.delete(id);

        if (original) {
          // Failures flow downstream as packets carrying an `error` field
          // (error is the message string posted by the worker).
          send({ ...original, error });
        }
      });
    }

    // Bare payloads fall back to the whole packet as task data.
    const taskId = queue.submit({ data: packet.payload ?? packet });
    pendingCallbacks.set(taskId, packet);
  };
}
|
|
278
|
+
|
|
279
|
+
|
|
280
|
+
// ─────────────────────────────────────────────────────────────
|
|
281
|
+
// Parallel Map - Map with worker pool
|
|
282
|
+
// ─────────────────────────────────────────────────────────────
|
|
283
|
+
|
|
284
|
+
/**
 * parallelMap - Process items in parallel using workers
 *
 * Fans an array payload out to a worker pool, waits for every item, then
 * emits one packet whose payload holds the results in input order.
 *
 * Fixes over the original:
 * - result state is now per-packet (the shared `results`/`expectedCount`
 *   corrupted output when two packets overlapped);
 * - the 'result' listener is removed once its batch completes (the original
 *   added a new listener per packet and never removed any — a leak);
 * - task ids are unique across packets (ids `i + 1` collided between
 *   concurrent batches);
 * - an empty array emits `[]` immediately instead of hanging forever.
 *
 * @param {Object} options
 * @param {Function} options.fn - Mapping function (must be self-contained;
 *   it is serialized into the worker)
 * @param {number} options.workers - Worker count (default: CPU count)
 * @param {number} options.batchSize - Items per batch (reserved; not yet used)
 * @returns {Function} Flow node: (send, packet) => void
 */
export function parallelMap(options = {}) {
  const {
    fn = (x) => x,
    workers: workerCount = cpus().length,
    batchSize = 1, // TODO: wire up actual batching
  } = options;

  let queue = null;
  let batchCounter = 0; // distinguishes task ids across packets

  return (send, packet) => {
    // Lazy pool creation so importing this module never spawns threads.
    if (!queue) {
      queue = taskQueue({ workers: workerCount, handler: fn });
    }

    const items = Array.isArray(packet.payload) ? packet.payload : [packet.payload];

    // Nothing to fan out — emit immediately rather than waiting forever.
    if (items.length === 0) {
      send({ ...packet, payload: [] });
      return;
    }

    // Per-packet bookkeeping: task id → input index, index → result.
    const batchId = ++batchCounter;
    const indexById = new Map();
    const results = new Map();

    const handleResult = ({ id, result }) => {
      if (!indexById.has(id)) return; // result belongs to a different batch

      results.set(indexById.get(id), result);

      if (results.size === items.length) {
        queue.off('result', handleResult); // done — drop our listener
        // Preserve input order.
        send({ ...packet, payload: items.map((_, i) => results.get(i)) });
      }
    };

    queue.on('result', handleResult);

    items.forEach((item, i) => {
      const id = `pm:${batchId}:${i}`; // unique across all batches
      indexById.set(id, i);
      queue.submit({ id, data: item });
    });
  };
}
|
|
329
|
+
|
|
330
|
+
|
|
331
|
+
// ─────────────────────────────────────────────────────────────
|
|
332
|
+
// CPU-bound Task Helper
|
|
333
|
+
// ─────────────────────────────────────────────────────────────
|
|
334
|
+
|
|
335
|
+
/**
 * cpuTask - Wrap a CPU-bound function for worker execution
 *
 * Convenience alias for worker() with only a handler. Note: each call
 * creates its own pool (lazily, of cpus().length workers on the first
 * packet), and fn is serialized into the worker, so it must be
 * self-contained — no closed-over variables.
 *
 * @param {Function} fn - CPU-bound function
 * @returns {Function} Worker-enabled function
 */
export function cpuTask(fn) {
  return worker({ handler: fn });
}
|
|
344
|
+
|
|
345
|
+
|
|
346
|
+
// ─────────────────────────────────────────────────────────────
|
|
347
|
+
// Export for Worker Script Usage
|
|
348
|
+
// ─────────────────────────────────────────────────────────────
|
|
349
|
+
|
|
350
|
+
// If this module is loaded in a worker thread
if (!isMainThread && parentPort) {
  // This allows the module to be used as a worker script
  // Usage: new Worker('./workers.js', { workerData: { handler: ... } })

  if (workerData?.handler) {
    // NOTE(review): `new Function` on the serialized handler is
    // eval-equivalent — only pass trusted code via workerData. Closures the
    // original function captured are NOT preserved across serialization.
    const handler = new Function('return ' + workerData.handler)();

    // Protocol mirrors taskQueue's inline worker: one message per task,
    // replied with { id, result } or { id, error }.
    parentPort.on('message', async (task) => {
      try {
        const result = await handler(task.data);
        parentPort.postMessage({ id: task.id, result });
      } catch (error) {
        // Only the message string is forwarded; the stack is dropped.
        parentPort.postMessage({ id: task.id, error: error.message });
      }
    });
  }
}
|