lulz 2.0.2 → 2.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +349 -0
- package/examples-queue.js +419 -0
- package/package.json +2 -1
package/README.md
CHANGED
|
@@ -352,6 +352,355 @@ Connect flows in sequence.
|
|
|
352
352
|
|
|
353
353
|
Explicit processing mode markers.
|
|
354
354
|
|
|
355
|
+
## Workers
|
|
356
|
+
|
|
357
|
+
lulz includes a worker pool system for CPU-intensive tasks. It uses **Worker Threads** in Node.js (and Web Workers in browsers) so heavy computation doesn't block your main thread.
|
|
358
|
+
|
|
359
|
+
### Why Workers?
|
|
360
|
+
|
|
361
|
+
JavaScript is single-threaded. If you compute Fibonacci(45), your entire app freezes:
|
|
362
|
+
|
|
363
|
+
```javascript
|
|
364
|
+
// ❌ Bad: Blocks everything
|
|
365
|
+
['input', (send, packet) => {
|
|
366
|
+
const result = fibonacci(packet.payload); // 🧊 Frozen for 5 seconds
|
|
367
|
+
send({ ...packet, payload: result });
|
|
368
|
+
}, 'output']
|
|
369
|
+
```
|
|
370
|
+
|
|
371
|
+
Workers run in separate threads:
|
|
372
|
+
|
|
373
|
+
```javascript
|
|
374
|
+
// ✅ Good: Non-blocking
|
|
375
|
+
['input', worker({ handler: fibonacci }), 'output']
|
|
376
|
+
// Main thread stays responsive while workers compute
|
|
377
|
+
```
|
|
378
|
+
|
|
379
|
+
---
|
|
380
|
+
|
|
381
|
+
### taskQueue ā Standalone Task Queue
|
|
382
|
+
|
|
383
|
+
The foundation. An EventEmitter that manages a pool of workers.
|
|
384
|
+
|
|
385
|
+
```javascript
|
|
386
|
+
import { taskQueue } from 'lulz';
|
|
387
|
+
|
|
388
|
+
// Create a queue with 4 workers
|
|
389
|
+
const queue = taskQueue({
|
|
390
|
+
workers: 4, // Number of worker threads (default: CPU cores)
|
|
391
|
+
handler: (data) => data * data // Function that runs in worker
|
|
392
|
+
});
|
|
393
|
+
|
|
394
|
+
// Listen for completed tasks
|
|
395
|
+
queue.on('result', ({ id, result }) => {
|
|
396
|
+
console.log(`Task ${id} finished:`, result);
|
|
397
|
+
});
|
|
398
|
+
|
|
399
|
+
// Listen for errors (handler threw an exception)
|
|
400
|
+
queue.on('error', ({ id, error }) => {
|
|
401
|
+
console.error(`Task ${id} failed:`, error);
|
|
402
|
+
});
|
|
403
|
+
|
|
404
|
+
// Listen for all tasks complete
|
|
405
|
+
queue.on('drain', () => {
|
|
406
|
+
console.log('All tasks done!');
|
|
407
|
+
});
|
|
408
|
+
|
|
409
|
+
// Submit a single task
|
|
410
|
+
queue.submit({ id: 'task-1', data: 42 });
|
|
411
|
+
// → Task task-1 finished: 1764
|
|
412
|
+
|
|
413
|
+
// Submit multiple tasks
|
|
414
|
+
queue.submitAll([
|
|
415
|
+
{ id: 'a', data: 10 },
|
|
416
|
+
{ id: 'b', data: 20 },
|
|
417
|
+
{ id: 'c', data: 30 },
|
|
418
|
+
]);
|
|
419
|
+
```
|
|
420
|
+
|
|
421
|
+
#### Async Handlers
|
|
422
|
+
|
|
423
|
+
Handlers can be async:
|
|
424
|
+
|
|
425
|
+
```javascript
|
|
426
|
+
const queue = taskQueue({
|
|
427
|
+
workers: 2,
|
|
428
|
+
handler: async (url) => {
|
|
429
|
+
const response = await fetch(url);
|
|
430
|
+
return response.json();
|
|
431
|
+
}
|
|
432
|
+
});
|
|
433
|
+
|
|
434
|
+
queue.submit({ data: 'https://api.example.com/data' });
|
|
435
|
+
```
|
|
436
|
+
|
|
437
|
+
#### Queue Control
|
|
438
|
+
|
|
439
|
+
```javascript
|
|
440
|
+
// Check queue status
|
|
441
|
+
console.log(queue.stats());
|
|
442
|
+
// → { pending: 5, running: 4, available: 0, totalSubmitted: 9, totalCompleted: 4 }
|
|
443
|
+
|
|
444
|
+
// Wait for all tasks to complete
|
|
445
|
+
await queue.drain();
|
|
446
|
+
|
|
447
|
+
// Shut down all workers
|
|
448
|
+
await queue.terminate();
|
|
449
|
+
```
|
|
450
|
+
|
|
451
|
+
---
|
|
452
|
+
|
|
453
|
+
### worker ā Flow Integration
|
|
454
|
+
|
|
455
|
+
Use workers directly in your flows. Packets go in, get processed in worker threads, come out.
|
|
456
|
+
|
|
457
|
+
```javascript
|
|
458
|
+
import { flow, worker } from 'lulz';
|
|
459
|
+
|
|
460
|
+
const app = flow([
|
|
461
|
+
['numbers',
|
|
462
|
+
// This runs in a worker thread, not the main thread
|
|
463
|
+
worker({
|
|
464
|
+
workers: 4,
|
|
465
|
+
handler: (n) => {
|
|
466
|
+
// Heavy computation here
|
|
467
|
+
let sum = 0;
|
|
468
|
+
for (let i = 0; i < n * 1000000; i++) {
|
|
469
|
+
sum += Math.sqrt(i);
|
|
470
|
+
}
|
|
471
|
+
return sum;
|
|
472
|
+
}
|
|
473
|
+
}),
|
|
474
|
+
'results'
|
|
475
|
+
],
|
|
476
|
+
|
|
477
|
+
['results', debug({ name: 'computed' })],
|
|
478
|
+
]);
|
|
479
|
+
|
|
480
|
+
// Send numbers to process
|
|
481
|
+
app.emit('numbers', { payload: 100 });
|
|
482
|
+
app.emit('numbers', { payload: 200 });
|
|
483
|
+
app.emit('numbers', { payload: 300 });
|
|
484
|
+
// Results arrive as workers complete (may be out of order)
|
|
485
|
+
```
|
|
486
|
+
|
|
487
|
+
#### Preserves Packet Metadata
|
|
488
|
+
|
|
489
|
+
The worker node keeps your packet's other properties intact:
|
|
490
|
+
|
|
491
|
+
```javascript
|
|
492
|
+
app.emit('numbers', {
|
|
493
|
+
payload: 100,
|
|
494
|
+
userId: 'alice', // ← preserved
|
|
495
|
+
requestId: 'req-123' // ← preserved
|
|
496
|
+
});
|
|
497
|
+
|
|
498
|
+
// Output packet:
|
|
499
|
+
// { payload: 12345.67, userId: 'alice', requestId: 'req-123' }
|
|
500
|
+
```
|
|
501
|
+
|
|
502
|
+
---
|
|
503
|
+
|
|
504
|
+
### parallelMap ā Process Arrays
|
|
505
|
+
|
|
506
|
+
When you have an array and want each item processed in parallel:
|
|
507
|
+
|
|
508
|
+
```javascript
|
|
509
|
+
import { flow, parallelMap } from 'lulz';
|
|
510
|
+
|
|
511
|
+
const app = flow([
|
|
512
|
+
['images',
|
|
513
|
+
parallelMap({
|
|
514
|
+
workers: 4,
|
|
515
|
+
fn: (image) => {
|
|
516
|
+
// Each image processed in its own worker
|
|
517
|
+
return {
|
|
518
|
+
...image,
|
|
519
|
+
thumbnail: generateThumbnail(image),
|
|
520
|
+
compressed: compress(image)
|
|
521
|
+
};
|
|
522
|
+
}
|
|
523
|
+
}),
|
|
524
|
+
'processed'
|
|
525
|
+
],
|
|
526
|
+
]);
|
|
527
|
+
|
|
528
|
+
// Send an array
|
|
529
|
+
app.emit('images', {
|
|
530
|
+
payload: [image1, image2, image3, image4, image5]
|
|
531
|
+
});
|
|
532
|
+
|
|
533
|
+
// Receive complete array (order preserved!)
|
|
534
|
+
// { payload: [processed1, processed2, processed3, processed4, processed5] }
|
|
535
|
+
```
|
|
536
|
+
|
|
537
|
+
Key difference from `worker`:
|
|
538
|
+
- `worker`: Each packet = one task, results stream out
|
|
539
|
+
- `parallelMap`: One packet with array = many tasks, waits for all, emits single array
|
|
540
|
+
|
|
541
|
+
---
|
|
542
|
+
|
|
543
|
+
### cpuTask ā Quick Wrapper
|
|
544
|
+
|
|
545
|
+
Shorthand when you just want to run a function in a worker:
|
|
546
|
+
|
|
547
|
+
```javascript
|
|
548
|
+
import { flow, cpuTask } from 'lulz';
|
|
549
|
+
|
|
550
|
+
// Instead of this:
|
|
551
|
+
worker({ handler: (n) => fibonacci(n) })
|
|
552
|
+
|
|
553
|
+
// Write this:
|
|
554
|
+
cpuTask((n) => fibonacci(n))
|
|
555
|
+
```
|
|
556
|
+
|
|
557
|
+
Example:
|
|
558
|
+
|
|
559
|
+
```javascript
|
|
560
|
+
const app = flow([
|
|
561
|
+
['input', cpuTask(expensiveCalculation), 'output'],
|
|
562
|
+
]);
|
|
563
|
+
```
|
|
564
|
+
|
|
565
|
+
It's just sugar for `worker({ handler: fn })` with default worker count.
|
|
566
|
+
|
|
567
|
+
---
|
|
568
|
+
|
|
569
|
+
### Patterns
|
|
570
|
+
|
|
571
|
+
#### Pattern 1: Fan-Out Computation
|
|
572
|
+
|
|
573
|
+
Process the same data multiple ways in parallel:
|
|
574
|
+
|
|
575
|
+
```javascript
|
|
576
|
+
const app = flow([
|
|
577
|
+
['data', [
|
|
578
|
+
worker({ handler: analyzeWithMethodA }),
|
|
579
|
+
worker({ handler: analyzeWithMethodB }),
|
|
580
|
+
worker({ handler: analyzeWithMethodC }),
|
|
581
|
+
], 'analyzed'],
|
|
582
|
+
]);
|
|
583
|
+
// All three analyses run simultaneously in different workers
|
|
584
|
+
```
|
|
585
|
+
|
|
586
|
+
#### Pattern 2: Pipeline with Mixed Threading
|
|
587
|
+
|
|
588
|
+
Some steps in main thread, heavy steps in workers:
|
|
589
|
+
|
|
590
|
+
```javascript
|
|
591
|
+
const app = flow([
|
|
592
|
+
['request',
|
|
593
|
+
validate, // Fast: main thread
|
|
594
|
+
parseInput, // Fast: main thread
|
|
595
|
+
worker({ handler: heavyTransform }), // Slow: worker
|
|
596
|
+
formatOutput, // Fast: main thread
|
|
597
|
+
'response'
|
|
598
|
+
],
|
|
599
|
+
]);
|
|
600
|
+
```
|
|
601
|
+
|
|
602
|
+
#### Pattern 3: Batch Processing
|
|
603
|
+
|
|
604
|
+
Split → process in workers → join:
|
|
605
|
+
|
|
606
|
+
```javascript
|
|
607
|
+
const app = flow([
|
|
608
|
+
// Split array into individual items
|
|
609
|
+
['batch', split(), 'item'],
|
|
610
|
+
|
|
611
|
+
// Process each in workers
|
|
612
|
+
['item', worker({ handler: processOne }), 'processed'],
|
|
613
|
+
|
|
614
|
+
// Collect results (need custom collector)
|
|
615
|
+
['processed', join({ count: expectedCount }), 'complete'],
|
|
616
|
+
]);
|
|
617
|
+
```
|
|
618
|
+
|
|
619
|
+
Or just use `parallelMap` which does this for you:
|
|
620
|
+
|
|
621
|
+
```javascript
|
|
622
|
+
const app = flow([
|
|
623
|
+
['batch', parallelMap({ fn: processOne }), 'complete'],
|
|
624
|
+
]);
|
|
625
|
+
```
|
|
626
|
+
|
|
627
|
+
---
|
|
628
|
+
|
|
629
|
+
### Error Handling
|
|
630
|
+
|
|
631
|
+
Worker errors don't crash your app. They emit on the `'error'` event:
|
|
632
|
+
|
|
633
|
+
```javascript
|
|
634
|
+
const queue = taskQueue({
|
|
635
|
+
handler: (data) => {
|
|
636
|
+
if (data < 0) throw new Error('Negative not allowed');
|
|
637
|
+
return Math.sqrt(data);
|
|
638
|
+
}
|
|
639
|
+
});
|
|
640
|
+
|
|
641
|
+
queue.on('result', ({ id, result }) => {
|
|
642
|
+
console.log(`${id} = ${result}`);
|
|
643
|
+
});
|
|
644
|
+
|
|
645
|
+
queue.on('error', ({ id, error }) => {
|
|
646
|
+
console.log(`${id} failed: ${error}`);
|
|
647
|
+
});
|
|
648
|
+
|
|
649
|
+
queue.submit({ id: 'good', data: 16 }); // → good = 4
|
|
650
|
+
queue.submit({ id: 'bad', data: -1 }); // → bad failed: Negative not allowed
|
|
651
|
+
```
|
|
652
|
+
|
|
653
|
+
In flows, errors become packet properties:
|
|
654
|
+
|
|
655
|
+
```javascript
|
|
656
|
+
['input', worker({ handler: riskyFunction }), 'output']
|
|
657
|
+
|
|
658
|
+
// If handler throws, packet becomes:
|
|
659
|
+
// { payload: ..., error: 'Error message' }
|
|
660
|
+
```
|
|
661
|
+
|
|
662
|
+
---
|
|
663
|
+
|
|
664
|
+
### Configuration
|
|
665
|
+
|
|
666
|
+
```javascript
|
|
667
|
+
import { cpus } from 'os';
|
|
668
|
+
|
|
669
|
+
taskQueue({
|
|
670
|
+
workers: cpus().length, // Default: number of CPU cores
|
|
671
|
+
handler: fn, // Required: function to run in worker
|
|
672
|
+
})
|
|
673
|
+
|
|
674
|
+
worker({
|
|
675
|
+
workers: 4, // Default: number of CPU cores
|
|
676
|
+
handler: fn, // Required: function to run in worker
|
|
677
|
+
})
|
|
678
|
+
|
|
679
|
+
parallelMap({
|
|
680
|
+
workers: 4, // Default: number of CPU cores
|
|
681
|
+
fn: fn, // Required: function to run in worker
|
|
682
|
+
})
|
|
683
|
+
```
|
|
684
|
+
|
|
685
|
+
---
|
|
686
|
+
|
|
687
|
+
### When to Use Workers
|
|
688
|
+
|
|
689
|
+
✅ **Use workers for:**
|
|
690
|
+
- Mathematical computations (crypto, statistics, ML inference)
|
|
691
|
+
- Image/video processing
|
|
692
|
+
- Data parsing (large JSON, CSV)
|
|
693
|
+
- Compression/decompression
|
|
694
|
+
- Any task taking >50ms
|
|
695
|
+
|
|
696
|
+
❌ **Don't use workers for:**
|
|
697
|
+
- Simple transformations (`x * 2`)
|
|
698
|
+
- I/O-bound tasks (use async/await instead)
|
|
699
|
+
- Tasks needing DOM access (workers can't touch DOM)
|
|
700
|
+
- Very small tasks (worker overhead > computation)
|
|
701
|
+
|
|
702
|
+
The overhead of sending data to a worker and back is ~1-5ms. If your task takes less than that, just run it in the main thread.
|
|
703
|
+
|
|
355
704
|
## Project Structure
|
|
356
705
|
|
|
357
706
|
```
|
|
@@ -0,0 +1,419 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* lulz - Worker Queue Examples
|
|
3
|
+
*
|
|
4
|
+
* Demonstrates:
|
|
5
|
+
* - taskQueue: Standalone task queue with worker pool
|
|
6
|
+
* - worker: Flow-integrated worker node
|
|
7
|
+
* - parallelMap: Parallel array processing
|
|
8
|
+
* - cpuTask: Quick wrapper for CPU-bound functions
|
|
9
|
+
*/
|
|
10
|
+
|
|
11
|
+
import {
|
|
12
|
+
flow,
|
|
13
|
+
inject,
|
|
14
|
+
debug,
|
|
15
|
+
func,
|
|
16
|
+
map,
|
|
17
|
+
split,
|
|
18
|
+
taskQueue,
|
|
19
|
+
worker,
|
|
20
|
+
parallelMap,
|
|
21
|
+
cpuTask
|
|
22
|
+
} from './index.js';
|
|
23
|
+
|
|
24
|
+
import { cpus } from 'os';
|
|
25
|
+
|
|
26
|
+
console.log(`\nš„ļø System has ${cpus().length} CPU cores\n`);
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
// āāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāā
|
|
30
|
+
// Example 1: Basic taskQueue
|
|
31
|
+
// āāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāā
|
|
32
|
+
|
|
33
|
+
console.log('āāā Example 1: Basic taskQueue āāā\n');
|
|
34
|
+
|
|
35
|
+
// Create a queue with a simple handler
|
|
36
|
+
const queue1 = taskQueue({
|
|
37
|
+
workers: 2,
|
|
38
|
+
handler: (data) => {
|
|
39
|
+
// Simulate CPU work
|
|
40
|
+
let result = 0;
|
|
41
|
+
for (let i = 0; i < 1_000_000; i++) {
|
|
42
|
+
result += Math.sqrt(i);
|
|
43
|
+
}
|
|
44
|
+
return { input: data, computed: Math.round(result) };
|
|
45
|
+
}
|
|
46
|
+
});
|
|
47
|
+
|
|
48
|
+
// Listen for results
|
|
49
|
+
queue1.on('result', ({ id, result }) => {
|
|
50
|
+
console.log(`[Queue1] Task ${id} completed:`, result);
|
|
51
|
+
});
|
|
52
|
+
|
|
53
|
+
queue1.on('error', ({ id, error }) => {
|
|
54
|
+
console.error(`[Queue1] Task ${id} failed:`, error);
|
|
55
|
+
});
|
|
56
|
+
|
|
57
|
+
queue1.on('drain', () => {
|
|
58
|
+
console.log('[Queue1] All tasks completed!\n');
|
|
59
|
+
});
|
|
60
|
+
|
|
61
|
+
// Submit tasks
|
|
62
|
+
console.log('[Queue1] Submitting 5 tasks...');
|
|
63
|
+
for (let i = 1; i <= 5; i++) {
|
|
64
|
+
queue1.submit({ id: i, data: i * 10 });
|
|
65
|
+
}
|
|
66
|
+
|
|
67
|
+
|
|
68
|
+
// āāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāā
|
|
69
|
+
// Example 2: taskQueue with async handler
|
|
70
|
+
// āāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāā
|
|
71
|
+
|
|
72
|
+
setTimeout(() => {
|
|
73
|
+
console.log('āāā Example 2: Async Handler āāā\n');
|
|
74
|
+
|
|
75
|
+
const queue2 = taskQueue({
|
|
76
|
+
workers: 3,
|
|
77
|
+
handler: async (data) => {
|
|
78
|
+
// Simulate async work (API call, file I/O, etc.)
|
|
79
|
+
await new Promise(r => setTimeout(r, 100 + Math.random() * 200));
|
|
80
|
+
return `Processed: ${data.toUpperCase()}`;
|
|
81
|
+
}
|
|
82
|
+
});
|
|
83
|
+
|
|
84
|
+
const results = [];
|
|
85
|
+
|
|
86
|
+
queue2.on('result', ({ id, result }) => {
|
|
87
|
+
results.push(result);
|
|
88
|
+
console.log(`[Queue2] Task ${id}: ${result}`);
|
|
89
|
+
});
|
|
90
|
+
|
|
91
|
+
queue2.on('drain', () => {
|
|
92
|
+
console.log(`[Queue2] All done! Results:`, results);
|
|
93
|
+
console.log();
|
|
94
|
+
});
|
|
95
|
+
|
|
96
|
+
// Submit multiple tasks
|
|
97
|
+
['apple', 'banana', 'cherry', 'date', 'elderberry'].forEach((fruit, i) => {
|
|
98
|
+
queue2.submit({ id: i + 1, data: fruit });
|
|
99
|
+
});
|
|
100
|
+
|
|
101
|
+
}, 1500);
|
|
102
|
+
|
|
103
|
+
|
|
104
|
+
// āāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāā
|
|
105
|
+
// Example 3: taskQueue statistics and control
|
|
106
|
+
// āāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāā
|
|
107
|
+
|
|
108
|
+
setTimeout(() => {
|
|
109
|
+
console.log('āāā Example 3: Queue Statistics āāā\n');
|
|
110
|
+
|
|
111
|
+
const queue3 = taskQueue({
|
|
112
|
+
workers: 2,
|
|
113
|
+
handler: async (data) => {
|
|
114
|
+
await new Promise(r => setTimeout(r, 150));
|
|
115
|
+
return data * 2;
|
|
116
|
+
}
|
|
117
|
+
});
|
|
118
|
+
|
|
119
|
+
queue3.on('result', ({ id, result }) => {
|
|
120
|
+
console.log(`[Queue3] Task ${id} = ${result}`);
|
|
121
|
+
console.log(`[Queue3] Stats:`, queue3.stats());
|
|
122
|
+
});
|
|
123
|
+
|
|
124
|
+
// Submit tasks and check stats
|
|
125
|
+
console.log('[Queue3] Initial stats:', queue3.stats());
|
|
126
|
+
|
|
127
|
+
queue3.submitAll([
|
|
128
|
+
{ id: 'a', data: 10 },
|
|
129
|
+
{ id: 'b', data: 20 },
|
|
130
|
+
{ id: 'c', data: 30 },
|
|
131
|
+
{ id: 'd', data: 40 },
|
|
132
|
+
]);
|
|
133
|
+
|
|
134
|
+
console.log('[Queue3] After submit:', queue3.stats());
|
|
135
|
+
|
|
136
|
+
// Wait for completion
|
|
137
|
+
queue3.drain().then(() => {
|
|
138
|
+
console.log('[Queue3] Final stats:', queue3.stats());
|
|
139
|
+
console.log();
|
|
140
|
+
});
|
|
141
|
+
|
|
142
|
+
}, 3000);
|
|
143
|
+
|
|
144
|
+
|
|
145
|
+
// āāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāā
|
|
146
|
+
// Example 4: worker() in a flow
|
|
147
|
+
// āāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāā
|
|
148
|
+
|
|
149
|
+
setTimeout(() => {
|
|
150
|
+
console.log('āāā Example 4: worker() in Flow āāā\n');
|
|
151
|
+
|
|
152
|
+
// Heavy computation that runs in worker thread
|
|
153
|
+
const heavyComputation = (n) => {
|
|
154
|
+
let result = 0;
|
|
155
|
+
for (let i = 0; i < n * 100000; i++) {
|
|
156
|
+
result += Math.sin(i) * Math.cos(i);
|
|
157
|
+
}
|
|
158
|
+
return Math.round(result * 1000) / 1000;
|
|
159
|
+
};
|
|
160
|
+
|
|
161
|
+
const app = flow([
|
|
162
|
+
// Input numbers
|
|
163
|
+
['numbers',
|
|
164
|
+
// Process in worker threads (doesn't block main thread!)
|
|
165
|
+
worker({
|
|
166
|
+
workers: 2,
|
|
167
|
+
handler: heavyComputation
|
|
168
|
+
}),
|
|
169
|
+
'computed'
|
|
170
|
+
],
|
|
171
|
+
|
|
172
|
+
// Log results
|
|
173
|
+
['computed', debug({ name: 'result', complete: true })],
|
|
174
|
+
]);
|
|
175
|
+
|
|
176
|
+
// Inject numbers to process
|
|
177
|
+
console.log('[Worker Flow] Processing numbers in parallel workers...');
|
|
178
|
+
[10, 20, 30, 40, 50].forEach((n, i) => {
|
|
179
|
+
setTimeout(() => {
|
|
180
|
+
console.log(`[Worker Flow] Submitting: ${n}`);
|
|
181
|
+
app.emit('numbers', { payload: n, id: i });
|
|
182
|
+
}, i * 100);
|
|
183
|
+
});
|
|
184
|
+
|
|
185
|
+
}, 5000);
|
|
186
|
+
|
|
187
|
+
|
|
188
|
+
// āāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāā
|
|
189
|
+
// Example 5: parallelMap for batch processing
|
|
190
|
+
// āāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāā
|
|
191
|
+
|
|
192
|
+
setTimeout(() => {
|
|
193
|
+
console.log('\nāāā Example 5: parallelMap āāā\n');
|
|
194
|
+
|
|
195
|
+
// Process entire arrays in parallel
|
|
196
|
+
const batchProcessor = flow([
|
|
197
|
+
['batch',
|
|
198
|
+
parallelMap({
|
|
199
|
+
workers: 4,
|
|
200
|
+
fn: (item) => {
|
|
201
|
+
// Each item processed in its own worker
|
|
202
|
+
return {
|
|
203
|
+
original: item,
|
|
204
|
+
squared: item * item,
|
|
205
|
+
sqrt: Math.sqrt(item),
|
|
206
|
+
};
|
|
207
|
+
}
|
|
208
|
+
}),
|
|
209
|
+
'processed'
|
|
210
|
+
],
|
|
211
|
+
|
|
212
|
+
['processed', debug({ name: 'batch-result', complete: true })],
|
|
213
|
+
]);
|
|
214
|
+
|
|
215
|
+
console.log('[parallelMap] Processing array [1, 4, 9, 16, 25]...');
|
|
216
|
+
batchProcessor.emit('batch', { payload: [1, 4, 9, 16, 25] });
|
|
217
|
+
|
|
218
|
+
}, 7500);
|
|
219
|
+
|
|
220
|
+
|
|
221
|
+
// āāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāā
|
|
222
|
+
// Example 6: cpuTask shorthand
|
|
223
|
+
// āāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāā
|
|
224
|
+
|
|
225
|
+
setTimeout(() => {
|
|
226
|
+
console.log('\nāāā Example 6: cpuTask Shorthand āāā\n');
|
|
227
|
+
|
|
228
|
+
// cpuTask is a quick way to wrap CPU-intensive functions
|
|
229
|
+
const fibonacci = (n) => {
|
|
230
|
+
if (n <= 1) return n;
|
|
231
|
+
let a = 0, b = 1;
|
|
232
|
+
for (let i = 2; i <= n; i++) {
|
|
233
|
+
[a, b] = [b, a + b];
|
|
234
|
+
}
|
|
235
|
+
return b;
|
|
236
|
+
};
|
|
237
|
+
|
|
238
|
+
const app = flow([
|
|
239
|
+
['input',
|
|
240
|
+
cpuTask(fibonacci), // Runs in worker thread
|
|
241
|
+
debug({ name: 'fibonacci' })
|
|
242
|
+
],
|
|
243
|
+
]);
|
|
244
|
+
|
|
245
|
+
console.log('[cpuTask] Computing Fibonacci numbers...');
|
|
246
|
+
[10, 20, 30, 40, 45].forEach((n, i) => {
|
|
247
|
+
setTimeout(() => {
|
|
248
|
+
console.log(`[cpuTask] fib(${n}) = ...`);
|
|
249
|
+
app.emit('input', { payload: n });
|
|
250
|
+
}, i * 200);
|
|
251
|
+
});
|
|
252
|
+
|
|
253
|
+
}, 9000);
|
|
254
|
+
|
|
255
|
+
|
|
256
|
+
// āāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāā
|
|
257
|
+
// Example 7: Real-world pattern - Image Processing Pipeline
|
|
258
|
+
// āāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāā
|
|
259
|
+
|
|
260
|
+
setTimeout(() => {
|
|
261
|
+
console.log('\nāāā Example 7: Image Processing Pipeline āāā\n');
|
|
262
|
+
|
|
263
|
+
// Simulated image processing functions
|
|
264
|
+
const processImage = (img) => {
|
|
265
|
+
// Simulate resize
|
|
266
|
+
const resized = { ...img, width: 800, height: 600 };
|
|
267
|
+
// Simulate compress
|
|
268
|
+
const compressed = { ...resized, size: Math.round(img.size * 0.3) };
|
|
269
|
+
// Simulate watermark
|
|
270
|
+
const final = { ...compressed, watermark: true };
|
|
271
|
+
return final;
|
|
272
|
+
};
|
|
273
|
+
|
|
274
|
+
const imageProcessor = flow([
|
|
275
|
+
// Images come in
|
|
276
|
+
['upload', func({ func: (msg) => {
|
|
277
|
+
console.log(`[Image] Uploading: ${msg.payload.name} (${msg.payload.size} bytes)`);
|
|
278
|
+
return msg;
|
|
279
|
+
}}), 'image'],
|
|
280
|
+
|
|
281
|
+
// Process in parallel workers
|
|
282
|
+
['image',
|
|
283
|
+
worker({
|
|
284
|
+
workers: 2,
|
|
285
|
+
handler: processImage
|
|
286
|
+
}),
|
|
287
|
+
'processed'
|
|
288
|
+
],
|
|
289
|
+
|
|
290
|
+
// Save results
|
|
291
|
+
['processed',
|
|
292
|
+
func({ func: (msg) => {
|
|
293
|
+
const p = msg.payload;
|
|
294
|
+
console.log(`[Image] Done: ${p.name} ā ${p.width}x${p.height}, ${p.size} bytes, watermark: ${p.watermark}`);
|
|
295
|
+
return msg;
|
|
296
|
+
}}),
|
|
297
|
+
'saved'
|
|
298
|
+
],
|
|
299
|
+
]);
|
|
300
|
+
|
|
301
|
+
// Simulate image uploads
|
|
302
|
+
const images = [
|
|
303
|
+
{ name: 'photo1.jpg', size: 5000000, width: 4000, height: 3000 },
|
|
304
|
+
{ name: 'photo2.jpg', size: 3500000, width: 3000, height: 2000 },
|
|
305
|
+
{ name: 'photo3.jpg', size: 8000000, width: 6000, height: 4000 },
|
|
306
|
+
];
|
|
307
|
+
|
|
308
|
+
images.forEach((img, i) => {
|
|
309
|
+
setTimeout(() => {
|
|
310
|
+
imageProcessor.emit('upload', { payload: img });
|
|
311
|
+
}, i * 300);
|
|
312
|
+
});
|
|
313
|
+
|
|
314
|
+
}, 11000);
|
|
315
|
+
|
|
316
|
+
|
|
317
|
+
// āāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāā
|
|
318
|
+
// Example 8: Error handling in workers
|
|
319
|
+
// āāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāā
|
|
320
|
+
|
|
321
|
+
setTimeout(() => {
|
|
322
|
+
console.log('\nāāā Example 8: Error Handling āāā\n');
|
|
323
|
+
|
|
324
|
+
const riskyQueue = taskQueue({
|
|
325
|
+
workers: 2,
|
|
326
|
+
handler: (data) => {
|
|
327
|
+
if (data < 0) {
|
|
328
|
+
throw new Error(`Negative numbers not allowed: ${data}`);
|
|
329
|
+
}
|
|
330
|
+
return Math.sqrt(data);
|
|
331
|
+
}
|
|
332
|
+
});
|
|
333
|
+
|
|
334
|
+
riskyQueue.on('result', ({ id, result }) => {
|
|
335
|
+
console.log(`[Risky] Task ${id} succeeded: ā${id} = ${result}`);
|
|
336
|
+
});
|
|
337
|
+
|
|
338
|
+
riskyQueue.on('error', ({ id, error }) => {
|
|
339
|
+
console.log(`[Risky] Task ${id} FAILED: ${error}`);
|
|
340
|
+
});
|
|
341
|
+
|
|
342
|
+
riskyQueue.on('drain', () => {
|
|
343
|
+
console.log('[Risky] Queue drained (some may have failed)\n');
|
|
344
|
+
});
|
|
345
|
+
|
|
346
|
+
// Submit mix of valid and invalid
|
|
347
|
+
[4, 9, -1, 16, -25, 36].forEach((n, i) => {
|
|
348
|
+
riskyQueue.submit({ id: n, data: n });
|
|
349
|
+
});
|
|
350
|
+
|
|
351
|
+
}, 13000);
|
|
352
|
+
|
|
353
|
+
|
|
354
|
+
// āāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāā
|
|
355
|
+
// Example 9: Combining with RxJS-style operators
|
|
356
|
+
// āāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāā
|
|
357
|
+
|
|
358
|
+
setTimeout(() => {
|
|
359
|
+
console.log('\nāāā Example 9: Workers + Rx Operators āāā\n');
|
|
360
|
+
|
|
361
|
+
const dataProcessor = flow([
|
|
362
|
+
// Split array into individual items
|
|
363
|
+
['data', split(), 'item'],
|
|
364
|
+
|
|
365
|
+
// Process each item in worker
|
|
366
|
+
['item',
|
|
367
|
+
map({ fn: (x) => x * 2 }), // Quick transform in main thread
|
|
368
|
+
worker({ // Heavy work in worker
|
|
369
|
+
workers: 2,
|
|
370
|
+
handler: (n) => {
|
|
371
|
+
let sum = 0;
|
|
372
|
+
for (let i = 0; i < n * 10000; i++) sum += Math.random();
|
|
373
|
+
return { n, sum: Math.round(sum) };
|
|
374
|
+
}
|
|
375
|
+
}),
|
|
376
|
+
'processed'
|
|
377
|
+
],
|
|
378
|
+
|
|
379
|
+
['processed', debug({ name: 'processed' })],
|
|
380
|
+
]);
|
|
381
|
+
|
|
382
|
+
console.log('[Rx+Workers] Processing [5, 10, 15, 20, 25]...');
|
|
383
|
+
dataProcessor.emit('data', { payload: [5, 10, 15, 20, 25] });
|
|
384
|
+
|
|
385
|
+
}, 15000);
|
|
386
|
+
|
|
387
|
+
|
|
388
|
+
// āāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāā
|
|
389
|
+
// Cleanup and summary
|
|
390
|
+
// āāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāā
|
|
391
|
+
|
|
392
|
+
setTimeout(() => {
|
|
393
|
+
console.log('\nāāā Summary āāā\n');
|
|
394
|
+
console.log('Worker utilities in lulz:');
|
|
395
|
+
console.log('');
|
|
396
|
+
console.log(' taskQueue({ workers, handler })');
|
|
397
|
+
console.log(' ā Standalone EventEmitter-based task queue');
|
|
398
|
+
console.log(' ā .submit(task), .submitAll(tasks), .drain()');
|
|
399
|
+
console.log(' ā Events: result, error, drain, idle');
|
|
400
|
+
console.log('');
|
|
401
|
+
console.log(' worker({ workers, handler })');
|
|
402
|
+
console.log(' ā Flow node that processes packets in workers');
|
|
403
|
+
console.log(' ā Non-blocking, preserves packet metadata');
|
|
404
|
+
console.log('');
|
|
405
|
+
console.log(' parallelMap({ workers, fn })');
|
|
406
|
+
console.log(' ā Process arrays with parallel workers');
|
|
407
|
+
console.log(' ā Preserves order, emits complete array');
|
|
408
|
+
console.log('');
|
|
409
|
+
console.log(' cpuTask(fn)');
|
|
410
|
+
console.log(' ā Quick wrapper: cpuTask(x => x*x)');
|
|
411
|
+
console.log(' ā Shorthand for worker({ handler: fn })');
|
|
412
|
+
console.log('');
|
|
413
|
+
console.log('āāā All Examples Complete āāā\n');
|
|
414
|
+
|
|
415
|
+
// Terminate queues
|
|
416
|
+
queue1.terminate();
|
|
417
|
+
|
|
418
|
+
process.exit(0);
|
|
419
|
+
}, 18000);
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "lulz",
|
|
3
|
-
"version": "2.0.2",
|
|
3
|
+
"version": "2.0.3",
|
|
4
4
|
"description": "A reactive dataflow system that makes coders happy",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"main": "index.js",
|
|
@@ -13,6 +13,7 @@
|
|
|
13
13
|
"scripts": {
|
|
14
14
|
"test": "node test.js",
|
|
15
15
|
"examples": "node examples.js",
|
|
16
|
+
"examples:queue": "node examples-queue.js",
|
|
16
17
|
"save": "git add .; git commit -m 'Updated Release'; npm version patch; npm publish; git push --follow-tags;"
|
|
17
18
|
},
|
|
18
19
|
"keywords": [
|