muriel 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,10 @@
1
+ {
2
+ "permissions": {
3
+ "allow": [
4
+ "Bash(node:*)",
5
+ "Bash(ls:*)",
6
+ "Bash(bash:*)",
7
+ "Bash(npm test)"
8
+ ]
9
+ }
10
+ }
package/README.md ADDED
@@ -0,0 +1,175 @@
1
+ # Lightweight Filtergraph Flow Engine
2
+
3
+ A tiny, human-readable flow engine inspired by FFmpeg filtergraphs and
4
+ Processing modes.
5
+
6
+ No nodes. No operators. No DSL gymnastics.
7
+
8
+ ---
9
+
10
+ ## Core Ideas
11
+
12
+ - **Named pipes** are event channels
13
+ - **Edges describe pipelines**
14
+ - **Arrays mean parallel**
15
+ - **Parallel stages auto-join**
16
+ - **Everything else is series**
17
+ - **Worker threads are optional**
18
+
19
+ ---
20
+
21
+ ## Transform API
22
+
23
+ ```js
24
+ function myTransform(options = {}) {
25
+ return (send, packet) => {
26
+ send({ ...packet, value: packet.value * 2 });
27
+ };
28
+ }
29
+ ```
30
+
31
+ * Outer function = configuration
32
+ * Inner function = execution
33
+ * Async supported
34
+
35
+ ---
36
+
37
+ ## Filtergraph Syntax
38
+
39
+ An edge is:
40
+
41
+ ```js
42
+ [
43
+ inputPipe,
44
+ stage1,
45
+ stage2,
46
+ ...stageN,
47
+ outputPipe
48
+ ]
49
+ ```
50
+
51
+ ### Series
52
+
53
+ ```js
54
+ ['post', normalize, verify, 'updated']
55
+ ```
56
+
57
+ ### Parallel (auto-joined)
58
+
59
+ ```js
60
+ ['post', [cover, audio, post], 'updated']
61
+ ```
62
+
63
+ ### Parallel → Series
64
+
65
+ ```js
66
+ ['post', [cover, audio, post], verify, backup, 'updated']
67
+ ```
68
+
69
+ Semantics:
70
+
71
+ ```
72
+ (post)
73
+ ├─► cover
74
+ ├─► audio
75
+ └─► post
76
+
77
+ [auto-join]
78
+
79
+ verify → backup → (updated)
80
+ ```
81
+
82
+ No explicit barrier required.
83
+
84
+ ---
85
+
86
+ ## Producers
87
+
88
+ ```js
89
+ function socket(channel) {
90
+ return send => {
91
+ setTimeout(() => {
92
+ send({ payload: { type: 'new-post' }, topic: channel });
93
+ }, 100);
94
+ };
95
+ }
96
+ ```
97
+
98
+ Usage:
99
+
100
+ ```js
101
+ [socket('post'), 'post']
102
+ ```
103
+
104
+ ---
105
+
106
+ ## Example: Blog Builder
107
+
108
+ ```js
109
+ import { flow } from './index.js';
110
+
111
+ const blog = flow(
112
+ [
113
+ [socket('post'), 'post'],
114
+ ['post', [cover, audio, post], verify, backup, 'updated'],
115
+ ['updated', pagerizer, 'done']
116
+ ],
117
+ {
118
+ context: { user: 'alice' },
119
+ workers: 8 // optional
120
+ }
121
+ );
122
+
123
+ blog.start();
124
+ ```
125
+
126
+ ---
127
+
128
+ ## Worker Threads
129
+
130
+ * Disabled when `workers === undefined`
131
+ * Enabled with fixed pool size
132
+ * Best for CPU-heavy transforms
133
+
134
+ ```js
135
+ flow(graph, { workers: 4 });
136
+ ```
137
+
138
+ ---
139
+
140
+ ## Disposal
141
+
142
+ Always explicit.
143
+
144
+ ```js
145
+ blog.dispose();
146
+ ```
147
+
148
+ * Removes all listeners
149
+ * Terminates worker threads
150
+ * Tears down entire graph
151
+
152
+ ---
153
+
154
+ ## Design Rules (Important)
155
+
156
+ 1. Arrays always imply parallelism
157
+ 2. Parallel stages always auto-join
158
+ 3. Join happens before the next stage
159
+ 4. Output pipes only receive joined packets
160
+ 5. Graphs are static and readable
161
+
162
+ If you can draw the graph on a whiteboard, the API is correct.
163
+
164
+ ---
165
+
166
+ ## Philosophy
167
+
168
+ Think **FFmpeg filtergraphs**, not Node-RED nodes.
169
+ Think **dataflow**, not operators.
170
+
171
+ ---
172
+
173
+ ## License
174
+
175
+ MIT
package/index.js ADDED
@@ -0,0 +1,246 @@
1
+ // index.js
2
+ import { EventEmitter } from 'events';
3
+ import { Worker } from 'worker_threads';
4
+ import os from 'os';
5
+
6
+ /* ─────────────────────────────────────────────
7
+ * Pipes
8
+ * ───────────────────────────────────────────── */
9
+
10
+ function createPipe(name) {
11
+ const ee = new EventEmitter();
12
+ return {
13
+ name,
14
+ send(packet) {
15
+ ee.emit('data', packet);
16
+ },
17
+ on(fn) {
18
+ ee.on('data', fn);
19
+ },
20
+ dispose() {
21
+ ee.removeAllListeners();
22
+ }
23
+ };
24
+ }
25
+
26
+ /* ─────────────────────────────────────────────
27
+ * Worker pool (optional)
28
+ * ───────────────────────────────────────────── */
29
+
30
/**
 * Create an optional fixed-size worker-thread pool.
 *
 * Filters are serialized with `Function.prototype.toString()` and re-created
 * in the worker with `eval`, so they must be self-contained (no closures
 * over outer variables).
 *
 * @param {number|undefined} size - pool size; `undefined` (or a
 *   non-positive number) disables the pool and returns `null`.
 * @returns {{exec: Function, dispose: Function}|null}
 */
function createWorkerPool(size) {
  // No pool requested — callers fall back to inline execution.
  // (A non-positive size previously produced an empty, broken pool.)
  if (size === undefined || size <= 0) return null;

  const workers = [];
  let cursor = 0;

  for (let i = 0; i < size; i++) {
    workers.push(
      new Worker(
        `
        const { parentPort } = require('worker_threads');
        parentPort.on('message', async ({ fn, packet }) => {
          try {
            const filter = eval(fn);
            await filter(
              out => parentPort.postMessage({ out }),
              packet
            );
          } catch (err) {
            parentPort.postMessage({ error: err.message });
          }
        });
        `,
        { eval: true }
      )
    );
  }

  return {
    /**
     * Run `filter(send, packet)` on the next worker (round-robin),
     * invoking `send` with the worker's output.
     */
    exec(filter, packet, send) {
      const worker = workers[cursor];
      // Keep the cursor bounded instead of letting `cursor++` grow forever.
      cursor = (cursor + 1) % workers.length;
      worker.once('message', msg => {
        if (msg.error) {
          // Throwing inside an EventEmitter handler is uncatchable and
          // would crash the process; log and drop the packet instead.
          console.error(`worker filter failed: ${msg.error}`);
          return;
        }
        send(msg.out);
      });
      worker.postMessage({ fn: filter.toString(), packet });
    },
    // Terminate every worker thread in the pool.
    dispose() {
      workers.forEach(w => w.terminate());
    }
  };
}
72
+
73
+ /* ─────────────────────────────────────────────
74
+ * Internal auto-join (implicit barrier)
75
+ * ───────────────────────────────────────────── */
76
+
77
+ function autoJoin(count, next) {
78
+ let buffer = [];
79
+
80
+ return packet => {
81
+ buffer.push(packet);
82
+ if (buffer.length === count) {
83
+ const joined = {
84
+ ...packet,
85
+ branches: buffer
86
+ };
87
+ buffer = [];
88
+ next(joined);
89
+ }
90
+ };
91
+ }
92
+
93
+ /* ─────────────────────────────────────────────
94
+ * Filter execution (returns promise for async filters)
95
+ * ───────────────────────────────────────────── */
96
+
97
/**
 * Execute a single filter against a packet.
 *
 * @param {Function} filter - `(send, packet) => ...`, sync or async.
 * @param {*} packet - input packet.
 * @param {Function} send - sink for the filter's output.
 * @param {?Object} pool - optional worker pool; when set, the filter runs
 *   on a worker thread and the returned promise resolves after `send`.
 * @returns {?Promise} a promise for async/pooled execution, `null` for sync.
 */
function runFilter(filter, packet, send, pool) {
  if (pool) {
    return new Promise(resolve => {
      pool.exec(filter, packet, out => {
        send(out);
        resolve();
      });
    });
  }
  try {
    const res = filter(send, packet);
    // Async filter: errors are logged, not rethrown (best-effort pipeline).
    if (res?.then) {
      return res.catch(console.error);
    }
  } catch (err) {
    // A throwing *sync* filter previously escaped into the async event
    // handler (unhandled rejection); handle it like the async path.
    console.error(err);
  }
  return null; // Sync filter
}
113
+
114
+ /* ─────────────────────────────────────────────
115
+ * Stage compiler
116
+ * ───────────────────────────────────────────── */
117
+
118
/**
 * Normalize raw edge stages into tagged descriptors:
 * an array becomes `{ type: 'parallel', filters }`,
 * anything else becomes `{ type: 'series', filter }`.
 */
function compileStages(stages) {
  const compiled = [];
  for (const stage of stages) {
    if (Array.isArray(stage)) {
      compiled.push({ type: 'parallel', filters: stage });
    } else {
      compiled.push({ type: 'series', filter: stage });
    }
  }
  return compiled;
}
125
+
126
+ /* ─────────────────────────────────────────────
127
+ * Edge runner (async-aware)
128
+ * ───────────────────────────────────────────── */
129
+
130
/**
 * Wire one filtergraph edge: for every packet arriving on `input`, run the
 * compiled stages (series stages sequentially, a parallel stage fanned out
 * and auto-joined) and push the final packet to `output`. `ctx` is merged
 * into the packet handed to each pre-join stage.
 */
function connectEdge(input, stages, output, ctx, pool) {
  input.on(async incoming => {
    let acc = incoming;

    for (let i = 0; i < stages.length; i++) {
      const node = stages[i];

      // Series stage: run one filter, capture its single output, advance.
      if (node.type === 'series') {
        let captured;
        const maybePromise = runFilter(
          node.filter,
          { ...acc, ...ctx },
          out => (captured = out),
          pool
        );
        if (maybePromise?.then) await maybePromise;
        acc = captured;
        continue;
      }

      // Parallel stage: fan out, then the join callback owns the tail.
      if (node.type === 'parallel') {
        const join = autoJoin(node.filters.length, async joined => {
          let downstream = joined;

          // Remaining stages after the implicit barrier must be series.
          for (let j = i + 1; j < stages.length; j++) {
            const s = stages[j];
            if (s.type !== 'series') {
              throw new Error('Nested parallel not supported');
            }
            const maybe = runFilter(
              s.filter,
              downstream,
              out => (downstream = out),
              pool
            );
            if (maybe?.then) await maybe;
          }

          output.send(downstream);
        });

        for (const branch of node.filters) {
          runFilter(branch, { ...acc, ...ctx }, join, pool);
        }
        // The join continuation sends to `output`; stop here.
        return;
      }
    }

    output.send(acc);
  });
}
190
+
191
+ /* ─────────────────────────────────────────────
192
+ * Flow (filtergraph)
193
+ * ───────────────────────────────────────────── */
194
+
195
/**
 * Build a filtergraph from edge definitions.
 *
 * @param {Array} definition - edges; `[producerFn, 'pipe']` registers a
 *   producer, `['in', ...stages, 'out']` wires a pipeline between pipes.
 * @param {Object} [options]
 * @param {Object} [options.context] - merged into every packet.
 * @param {number} [options.workers] - optional worker-pool size.
 * @returns {{start: Function, dispose: Function}}
 */
export function flow(definition, options = {}) {
  const pipes = new Map();
  const pool = createWorkerPool(
    options.workers ?? undefined
  );
  const ctx = options.context || {};
  const producers = [];
  let started = false;
  let disposed = false;

  // Lazily create named pipes so edges may reference them in any order.
  const getPipe = name => {
    if (!pipes.has(name)) pipes.set(name, createPipe(name));
    return pipes.get(name);
  };

  // Run each producer exactly once, regardless of how start is triggered.
  const startProducers = () => {
    if (started || disposed) return;
    started = true;
    producers.forEach(p => p());
  };

  // First pass: set up all edges (listeners)
  for (const edge of definition) {
    // Producer → pipe (defer execution until start)
    if (typeof edge[0] === 'function') {
      const producer = edge[0];
      const pipe = getPipe(edge[1]);
      producers.push(() => producer(packet => pipe.send({ ...packet, ...ctx })));
      continue;
    }

    // Filtergraph edge
    const [inputName, ...rest] = edge;
    const outputName = rest.pop();
    const stages = compileStages(rest);

    connectEdge(
      getPipe(inputName),
      stages,
      getPipe(outputName),
      ctx,
      pool
    );
  }

  // Auto-start after all edges are connected (unless start() ran first).
  queueMicrotask(startProducers);

  return {
    // Previously a no-op; now kicks producers off immediately (idempotent,
    // matching the README's `blog.start()` usage).
    start: startProducers,
    dispose() {
      // Block any not-yet-fired auto-start, then tear everything down.
      disposed = true;
      pipes.forEach(p => p.dispose());
      pool?.dispose();
    }
  };
}
package/package.json ADDED
@@ -0,0 +1,13 @@
1
+ {
2
+ "name": "muriel",
3
+ "version": "1.0.0",
4
+ "description": "Lightweight Filtergraph Flow Engine",
5
+ "main": "index.js",
6
+ "scripts": {
7
+ "test": "node test.js"
8
+ },
9
+ "keywords": [],
10
+ "author": "",
11
+ "license": "MIT",
12
+ "type": "module"
13
+ }
package/test.js ADDED
@@ -0,0 +1,524 @@
1
+ // test.js - Tests for the muriel filtergraph flow engine
2
+ import { flow } from './index.js';
3
+
4
+ let passed = 0;
5
+ let failed = 0;
6
+
7
// Run one named test case, recording the outcome in the shared counters.
async function test(name, fn) {
  let failure = null;
  try {
    await fn();
  } catch (err) {
    failure = err;
  }
  if (failure === null) {
    console.log(`✓ ${name}`);
    passed++;
  } else {
    console.log(`✗ ${name}`);
    console.log(`  ${failure.message}`);
    failed++;
  }
}
18
+
19
// Throw when a condition is falsy; the message argument is optional.
function assert(condition, message) {
  if (condition) return;
  throw new Error(message || 'Assertion failed');
}
22
+
23
// Strict-equality assertion with a formatted default message.
function assertEqual(actual, expected, message) {
  if (actual === expected) return;
  throw new Error(message || `Expected ${expected}, got ${actual}`);
}
28
+
29
// Helper: resolves once the current microtask queue has flushed.
const tick = () => new Promise(resolve => queueMicrotask(resolve));
31
+
32
+ // ─────────────────────────────────────────────
33
+ // Test: Basic producer → pipe
34
+ // ─────────────────────────────────────────────
35
+
36
await test('Producer sends to named pipe', async () => {
  let received = null;

  const producer = send => send({ value: 42 });

  const consumer = () => (send, packet) => {
    received = packet.value;
    send(packet);
  };

  const graph = flow([
    [producer, 'input'],
    ['input', consumer(), 'output']
  ]);

  await tick();
  assertEqual(received, 42, 'Consumer should receive value 42');
  graph.dispose();
});
59
+
60
+ // ─────────────────────────────────────────────
61
+ // Test: Series pipeline
62
+ // ─────────────────────────────────────────────
63
+
64
await test('Series pipeline processes in order', async () => {
  const steps = [];
  let result = null;

  const producer = send => send({ value: 1 });

  // Each stage records its label, then applies its arithmetic op.
  const stage = (label, op) => () => (send, packet) => {
    steps.push(label);
    send({ ...packet, value: op(packet.value) });
  };
  const step1 = stage('step1', v => v + 1);
  const step2 = stage('step2', v => v * 2);
  const step3 = stage('step3', v => v + 10);

  const collector = () => (send, packet) => {
    result = packet.value;
    send(packet);
  };

  const graph = flow([
    [producer, 'in'],
    ['in', step1(), step2(), step3(), collector(), 'out']
  ]);

  await tick();
  assertEqual(steps.join('→'), 'step1→step2→step3', 'Steps should execute in order');
  assertEqual(result, 14, '(1+1)*2+10 = 14');
  graph.dispose();
});
110
+
111
+ // ─────────────────────────────────────────────
112
+ // Test: Parallel stages with auto-join
113
+ // ─────────────────────────────────────────────
114
+
115
await test('Parallel stages auto-join', async () => {
  let joinedPacket = null;

  const producer = send => send({ id: 'test' });

  // One branch per key; each tags the packet with its own marker.
  const branch = key => (send, packet) =>
    send({ ...packet, [key]: `processed-${key.toUpperCase()}` });

  const collector = () => (send, packet) => {
    joinedPacket = packet;
    send(packet);
  };

  const graph = flow([
    [producer, 'in'],
    ['in', [branch('a'), branch('b'), branch('c')], collector(), 'out']
  ]);

  await tick();
  assert(joinedPacket !== null, 'Joined packet should exist');
  assert(joinedPacket.branches, 'Joined packet should have branches array');
  assertEqual(joinedPacket.branches.length, 3, 'Should have 3 branches');

  const hasA = joinedPacket.branches.some(b => b.a === 'processed-A');
  const hasB = joinedPacket.branches.some(b => b.b === 'processed-B');
  const hasC = joinedPacket.branches.some(b => b.c === 'processed-C');

  assert(hasA && hasB && hasC, 'All branches should be present');
  graph.dispose();
});
164
+
165
+ // ─────────────────────────────────────────────
166
+ // Test: Parallel → Series flow
167
+ // ─────────────────────────────────────────────
168
+
169
await test('Parallel stages followed by series stages', async () => {
  const executionOrder = [];
  let finalResult = null;

  const producer = send => send({ value: 10 });

  // Parallel branches: record their label and attach a scaled value.
  const parallelStage = (label, key, factor) => (send, packet) => {
    executionOrder.push(label);
    send({ ...packet, [key]: packet.value * factor });
  };

  const afterJoin = () => (send, packet) => {
    executionOrder.push('afterJoin');
    send({ ...packet, afterJoin: true });
  };

  const final = () => (send, packet) => {
    executionOrder.push('final');
    finalResult = packet;
    send(packet);
  };

  const graph = flow([
    [producer, 'in'],
    [
      'in',
      [parallelStage('parallelA', 'fromA', 2), parallelStage('parallelB', 'fromB', 3)],
      afterJoin(),
      final(),
      'out'
    ]
  ]);

  await tick();
  assert(executionOrder.includes('parallelA'), 'parallelA should execute');
  assert(executionOrder.includes('parallelB'), 'parallelB should execute');
  assert(executionOrder.indexOf('afterJoin') > executionOrder.indexOf('parallelA'), 'afterJoin should come after parallel');
  assert(executionOrder.indexOf('final') > executionOrder.indexOf('afterJoin'), 'final should come after afterJoin');
  assert(finalResult.afterJoin === true, 'afterJoin flag should be set');
  graph.dispose();
});
219
+
220
+ // ─────────────────────────────────────────────
221
+ // Test: Context propagation
222
+ // ─────────────────────────────────────────────
223
+
224
await test('Context is propagated to all stages', async () => {
  let receivedContext = null;

  const producer = send => send({ data: 'test' });

  const reader = () => (send, packet) => {
    receivedContext = { user: packet.user, env: packet.env };
    send(packet);
  };

  const graph = flow(
    [
      [producer, 'in'],
      ['in', reader(), 'out']
    ],
    { context: { user: 'alice', env: 'production' } }
  );

  await tick();
  assertEqual(receivedContext.user, 'alice', 'User context should be alice');
  assertEqual(receivedContext.env, 'production', 'Env context should be production');
  graph.dispose();
});
253
+
254
+ // ─────────────────────────────────────────────
255
+ // Test: Multiple edges in graph
256
+ // ─────────────────────────────────────────────
257
+
258
await test('Multiple edges can share pipes', async () => {
  let firstResult = null;
  let secondResult = null;

  const producer = send => send({ value: 5 });

  const doubler = () => (send, packet) =>
    send({ ...packet, value: packet.value * 2 });

  const adder = () => (send, packet) =>
    send({ ...packet, value: packet.value + 100 });

  const firstCollector = () => (send, packet) => {
    firstResult = packet.value;
    send(packet);
  };

  const secondCollector = () => (send, packet) => {
    secondResult = packet.value;
    send(packet);
  };

  const graph = flow([
    [producer, 'in'],
    ['in', doubler(), firstCollector(), 'middle'],
    ['middle', adder(), secondCollector(), 'out']
  ]);

  await tick();
  assertEqual(firstResult, 10, 'First edge: 5 * 2 = 10');
  assertEqual(secondResult, 110, 'Second edge: 10 + 100 = 110');
  graph.dispose();
});
303
+
304
+ // ─────────────────────────────────────────────
305
+ // Test: Disposal cleans up resources
306
+ // ─────────────────────────────────────────────
307
+
308
await test('Disposal prevents further events', async () => {
  let count = 0;
  let sendFn = null;

  const producer = send => {
    sendFn = send;
    send({ value: 1 });
  };

  const counter = () => (send, packet) => {
    count++;
    send(packet);
  };

  const graph = flow([
    [producer, 'in'],
    ['in', counter(), 'out']
  ]);

  await tick();
  assertEqual(count, 1, 'Should have received 1 event');
  graph.dispose();

  // Try sending after disposal - should not increment
  try {
    sendFn({ value: 2 });
  } catch (e) {
    // Expected
  }
  // The important thing is dispose doesn't throw
});
341
+
342
+ // ─────────────────────────────────────────────
343
+ // Test: Async transforms
344
+ // ─────────────────────────────────────────────
345
+
346
await test('Async transforms are supported', async () => {
  let result = null;

  const producer = send => send({ value: 1 });

  // Timer-backed async stage to exercise the await path in the engine.
  const asyncDouble = () => async (send, packet) => {
    await new Promise(r => setTimeout(r, 10));
    send({ ...packet, value: packet.value * 2 });
  };

  const collector = () => (send, packet) => {
    result = packet.value;
    send(packet);
  };

  const graph = flow([
    [producer, 'in'],
    ['in', asyncDouble(), collector(), 'out']
  ]);

  // Wait for async operations
  await new Promise(r => setTimeout(r, 50));

  assertEqual(result, 2, 'Async transform should double the value');
  graph.dispose();
});
378
+
379
+ // ─────────────────────────────────────────────
380
+ // Test: Direct pipe connection with no stages
381
+ // ─────────────────────────────────────────────
382
+
383
await test('Direct pipe connection with one stage', async () => {
  let received = null;

  const producer = send => send({ value: 'passthrough' });

  const collector = () => (send, packet) => {
    received = packet.value;
    send(packet);
  };

  const graph = flow([
    [producer, 'in'],
    ['in', collector(), 'out']
  ]);

  await tick();
  assertEqual(received, 'passthrough', 'Value should pass through');
  graph.dispose();
});
406
+
407
+ // ─────────────────────────────────────────────
408
+ // Test: Blog builder simulation
409
+ // ─────────────────────────────────────────────
410
+
411
await test('Blog builder pattern simulation', async () => {
  const processed = { cover: false, audio: false, post: false, verified: false, backed: false };
  let finalPacket = null;

  const socket = channel => send =>
    send({ payload: { type: 'new-post', id: 'post-123' }, topic: channel });

  // Parallel branches: each marks its flag and attaches an artifact path.
  const artifact = (flag, key, path) => () => (send, packet) => {
    processed[flag] = true;
    send({ ...packet, [key]: path });
  };
  const cover = artifact('cover', 'coverPath', '/covers/post-123.avif');
  const audio = artifact('audio', 'audioPath', '/audio/post-123.mp3');
  const post = artifact('post', 'postPath', '/posts/post-123.html');

  const verify = () => (send, packet) => {
    processed.verified = true;
    const allBranches = packet.branches || [];
    const hasCover = allBranches.some(b => b.coverPath);
    const hasAudio = allBranches.some(b => b.audioPath);
    const hasPost = allBranches.some(b => b.postPath);
    send({ ...packet, valid: hasCover && hasAudio && hasPost });
  };

  const backup = () => (send, packet) => {
    processed.backed = true;
    send({ ...packet, backedUp: true });
  };

  const collector = () => (send, packet) => {
    finalPacket = packet;
    send(packet);
  };

  const blog = flow(
    [
      [socket('post'), 'post'],
      ['post', [cover(), audio(), post()], verify(), backup(), collector(), 'updated']
    ],
    { context: { user: 'alice' } }
  );

  await tick();
  assert(processed.cover, 'Cover should be processed');
  assert(processed.audio, 'Audio should be processed');
  assert(processed.post, 'Post should be processed');
  assert(processed.verified, 'Should be verified');
  assert(processed.backed, 'Should be backed up');
  assert(finalPacket.valid, 'Final packet should be valid');
  assertEqual(finalPacket.user, 'alice', 'Context should be available');
  blog.dispose();
});
487
+
488
+ // ─────────────────────────────────────────────
489
+ // Test: Delayed producer (setTimeout)
490
+ // ─────────────────────────────────────────────
491
+
492
await test('Delayed producer with setTimeout', async () => {
  let received = null;

  const delayedProducer = send => {
    setTimeout(() => send({ value: 'delayed' }), 20);
  };

  const collector = () => (send, packet) => {
    received = packet.value;
    send(packet);
  };

  const graph = flow([
    [delayedProducer, 'in'],
    ['in', collector(), 'out']
  ]);

  await new Promise(r => setTimeout(r, 50));
  assertEqual(received, 'delayed', 'Should receive delayed value');
  graph.dispose();
});
515
+
516
+ // ─────────────────────────────────────────────
517
+ // Summary
518
+ // ─────────────────────────────────────────────
519
+
520
// Print the run summary and exit non-zero if any test failed.
const divider = '─────────────────────────────────────────────';
console.log(`\n${divider}`);
console.log(`Tests: ${passed + failed} | Passed: ${passed} | Failed: ${failed}`);
console.log(divider);

process.exit(failed > 0 ? 1 : 0);