@dojocho/effect-ts 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (149)
  1. package/DOJO.md +22 -0
  2. package/dojo.json +50 -0
  3. package/katas/001-hello-effect/SENSEI.md +72 -0
  4. package/katas/001-hello-effect/solution.test.ts +35 -0
  5. package/katas/001-hello-effect/solution.ts +16 -0
  6. package/katas/002-transform-with-map/SENSEI.md +72 -0
  7. package/katas/002-transform-with-map/solution.test.ts +33 -0
  8. package/katas/002-transform-with-map/solution.ts +16 -0
  9. package/katas/003-generator-pipelines/SENSEI.md +72 -0
  10. package/katas/003-generator-pipelines/solution.test.ts +40 -0
  11. package/katas/003-generator-pipelines/solution.ts +29 -0
  12. package/katas/004-flatmap-and-chaining/SENSEI.md +80 -0
  13. package/katas/004-flatmap-and-chaining/solution.test.ts +34 -0
  14. package/katas/004-flatmap-and-chaining/solution.ts +18 -0
  15. package/katas/005-pipe-composition/SENSEI.md +81 -0
  16. package/katas/005-pipe-composition/solution.test.ts +41 -0
  17. package/katas/005-pipe-composition/solution.ts +19 -0
  18. package/katas/006-handle-errors/SENSEI.md +86 -0
  19. package/katas/006-handle-errors/solution.test.ts +53 -0
  20. package/katas/006-handle-errors/solution.ts +30 -0
  21. package/katas/007-tagged-errors/SENSEI.md +79 -0
  22. package/katas/007-tagged-errors/solution.test.ts +82 -0
  23. package/katas/007-tagged-errors/solution.ts +37 -0
  24. package/katas/008-error-patterns/SENSEI.md +89 -0
  25. package/katas/008-error-patterns/solution.test.ts +41 -0
  26. package/katas/008-error-patterns/solution.ts +38 -0
  27. package/katas/009-option-type/SENSEI.md +96 -0
  28. package/katas/009-option-type/solution.test.ts +49 -0
  29. package/katas/009-option-type/solution.ts +26 -0
  30. package/katas/010-either-and-exit/SENSEI.md +86 -0
  31. package/katas/010-either-and-exit/solution.test.ts +33 -0
  32. package/katas/010-either-and-exit/solution.ts +17 -0
  33. package/katas/011-services-and-context/SENSEI.md +82 -0
  34. package/katas/011-services-and-context/solution.test.ts +23 -0
  35. package/katas/011-services-and-context/solution.ts +17 -0
  36. package/katas/012-layers/SENSEI.md +73 -0
  37. package/katas/012-layers/solution.test.ts +23 -0
  38. package/katas/012-layers/solution.ts +26 -0
  39. package/katas/013-testing-effects/SENSEI.md +88 -0
  40. package/katas/013-testing-effects/solution.test.ts +41 -0
  41. package/katas/013-testing-effects/solution.ts +20 -0
  42. package/katas/014-schema-basics/SENSEI.md +81 -0
  43. package/katas/014-schema-basics/solution.test.ts +35 -0
  44. package/katas/014-schema-basics/solution.ts +25 -0
  45. package/katas/015-domain-modeling/SENSEI.md +85 -0
  46. package/katas/015-domain-modeling/solution.test.ts +46 -0
  47. package/katas/015-domain-modeling/solution.ts +42 -0
  48. package/katas/016-retry-and-schedule/SENSEI.md +72 -0
  49. package/katas/016-retry-and-schedule/solution.test.ts +26 -0
  50. package/katas/016-retry-and-schedule/solution.ts +23 -0
  51. package/katas/017-parallel-effects/SENSEI.md +70 -0
  52. package/katas/017-parallel-effects/solution.test.ts +33 -0
  53. package/katas/017-parallel-effects/solution.ts +17 -0
  54. package/katas/018-race-and-timeout/SENSEI.md +75 -0
  55. package/katas/018-race-and-timeout/solution.test.ts +30 -0
  56. package/katas/018-race-and-timeout/solution.ts +27 -0
  57. package/katas/019-ref-and-state/SENSEI.md +72 -0
  58. package/katas/019-ref-and-state/solution.test.ts +29 -0
  59. package/katas/019-ref-and-state/solution.ts +16 -0
  60. package/katas/020-fibers/SENSEI.md +80 -0
  61. package/katas/020-fibers/solution.test.ts +23 -0
  62. package/katas/020-fibers/solution.ts +23 -0
  63. package/katas/021-acquire-release/SENSEI.md +57 -0
  64. package/katas/021-acquire-release/solution.test.ts +23 -0
  65. package/katas/021-acquire-release/solution.ts +22 -0
  66. package/katas/022-scoped-layers/SENSEI.md +52 -0
  67. package/katas/022-scoped-layers/solution.test.ts +35 -0
  68. package/katas/022-scoped-layers/solution.ts +19 -0
  69. package/katas/023-resource-patterns/SENSEI.md +52 -0
  70. package/katas/023-resource-patterns/solution.test.ts +20 -0
  71. package/katas/023-resource-patterns/solution.ts +13 -0
  72. package/katas/024-streams-basics/SENSEI.md +61 -0
  73. package/katas/024-streams-basics/solution.test.ts +30 -0
  74. package/katas/024-streams-basics/solution.ts +16 -0
  75. package/katas/025-stream-operations/SENSEI.md +59 -0
  76. package/katas/025-stream-operations/solution.test.ts +26 -0
  77. package/katas/025-stream-operations/solution.ts +17 -0
  78. package/katas/026-combining-streams/SENSEI.md +54 -0
  79. package/katas/026-combining-streams/solution.test.ts +20 -0
  80. package/katas/026-combining-streams/solution.ts +16 -0
  81. package/katas/027-data-pipelines/SENSEI.md +58 -0
  82. package/katas/027-data-pipelines/solution.test.ts +22 -0
  83. package/katas/027-data-pipelines/solution.ts +16 -0
  84. package/katas/028-logging-and-spans/SENSEI.md +58 -0
  85. package/katas/028-logging-and-spans/solution.test.ts +50 -0
  86. package/katas/028-logging-and-spans/solution.ts +20 -0
  87. package/katas/029-http-client/SENSEI.md +59 -0
  88. package/katas/029-http-client/solution.test.ts +49 -0
  89. package/katas/029-http-client/solution.ts +24 -0
  90. package/katas/030-capstone/SENSEI.md +63 -0
  91. package/katas/030-capstone/solution.test.ts +67 -0
  92. package/katas/030-capstone/solution.ts +55 -0
  93. package/katas/031-config-and-environment/SENSEI.md +77 -0
  94. package/katas/031-config-and-environment/solution.test.ts +38 -0
  95. package/katas/031-config-and-environment/solution.ts +11 -0
  96. package/katas/032-cause-and-defects/SENSEI.md +90 -0
  97. package/katas/032-cause-and-defects/solution.test.ts +50 -0
  98. package/katas/032-cause-and-defects/solution.ts +23 -0
  99. package/katas/033-pattern-matching/SENSEI.md +86 -0
  100. package/katas/033-pattern-matching/solution.test.ts +36 -0
  101. package/katas/033-pattern-matching/solution.ts +28 -0
  102. package/katas/034-deferred-and-coordination/SENSEI.md +85 -0
  103. package/katas/034-deferred-and-coordination/solution.test.ts +25 -0
  104. package/katas/034-deferred-and-coordination/solution.ts +24 -0
  105. package/katas/035-queue-and-backpressure/SENSEI.md +100 -0
  106. package/katas/035-queue-and-backpressure/solution.test.ts +25 -0
  107. package/katas/035-queue-and-backpressure/solution.ts +21 -0
  108. package/katas/036-schema-advanced/SENSEI.md +81 -0
  109. package/katas/036-schema-advanced/solution.test.ts +55 -0
  110. package/katas/036-schema-advanced/solution.ts +19 -0
  111. package/katas/037-cache-and-memoization/SENSEI.md +73 -0
  112. package/katas/037-cache-and-memoization/solution.test.ts +47 -0
  113. package/katas/037-cache-and-memoization/solution.ts +24 -0
  114. package/katas/038-metrics/SENSEI.md +91 -0
  115. package/katas/038-metrics/solution.test.ts +39 -0
  116. package/katas/038-metrics/solution.ts +23 -0
  117. package/katas/039-managed-runtime/SENSEI.md +75 -0
  118. package/katas/039-managed-runtime/solution.test.ts +29 -0
  119. package/katas/039-managed-runtime/solution.ts +19 -0
  120. package/katas/040-request-batching/SENSEI.md +87 -0
  121. package/katas/040-request-batching/solution.test.ts +56 -0
  122. package/katas/040-request-batching/solution.ts +32 -0
  123. package/package.json +22 -0
  124. package/skills/effect-patterns-building-apis/SKILL.md +2393 -0
  125. package/skills/effect-patterns-building-data-pipelines/SKILL.md +1876 -0
  126. package/skills/effect-patterns-concurrency/SKILL.md +2999 -0
  127. package/skills/effect-patterns-concurrency-getting-started/SKILL.md +351 -0
  128. package/skills/effect-patterns-core-concepts/SKILL.md +3199 -0
  129. package/skills/effect-patterns-domain-modeling/SKILL.md +1385 -0
  130. package/skills/effect-patterns-error-handling/SKILL.md +1212 -0
  131. package/skills/effect-patterns-error-handling-resilience/SKILL.md +179 -0
  132. package/skills/effect-patterns-error-management/SKILL.md +1668 -0
  133. package/skills/effect-patterns-getting-started/SKILL.md +237 -0
  134. package/skills/effect-patterns-making-http-requests/SKILL.md +1756 -0
  135. package/skills/effect-patterns-observability/SKILL.md +1586 -0
  136. package/skills/effect-patterns-platform/SKILL.md +1195 -0
  137. package/skills/effect-patterns-platform-getting-started/SKILL.md +179 -0
  138. package/skills/effect-patterns-project-setup--execution/SKILL.md +233 -0
  139. package/skills/effect-patterns-resource-management/SKILL.md +827 -0
  140. package/skills/effect-patterns-scheduling/SKILL.md +451 -0
  141. package/skills/effect-patterns-scheduling-periodic-tasks/SKILL.md +763 -0
  142. package/skills/effect-patterns-streams/SKILL.md +2052 -0
  143. package/skills/effect-patterns-streams-getting-started/SKILL.md +421 -0
  144. package/skills/effect-patterns-streams-sinks/SKILL.md +1181 -0
  145. package/skills/effect-patterns-testing/SKILL.md +1632 -0
  146. package/skills/effect-patterns-tooling-and-debugging/SKILL.md +1125 -0
  147. package/skills/effect-patterns-value-handling/SKILL.md +676 -0
  148. package/tsconfig.json +20 -0
  149. package/vitest.config.ts +3 -0
@@ -0,0 +1,2052 @@ package/skills/effect-patterns-streams/SKILL.md (new file)
---
name: effect-patterns-streams
description: Effect-TS patterns for Streams. Use when working with streams in Effect-TS applications.
---

# Effect-TS Patterns: Streams

This skill provides 8 curated Effect-TS patterns for streams.

Use this skill when working on tasks related to:

- streams
- Best practices in Effect-TS applications
- Real-world patterns and solutions

---

## 🟢 Beginner Patterns

### Stream Pattern 1: Transform Streams with Map and Filter

**Rule:** Use map and filter combinators to transform stream elements declaratively, creating pipelines that reshape data without materializing intermediate results.

**Good Example:**

This example demonstrates transforming a stream of raw data through multiple stages.

```typescript
import { Stream, Effect, Chunk } from "effect";

interface RawLogEntry {
  readonly timestamp: string;
  readonly level: string;
  readonly message: string;
}

interface ProcessedLog {
  readonly date: Date;
  readonly severity: "low" | "medium" | "high";
  readonly normalizedMessage: string;
}

// Create a stream of raw log entries
const createLogStream = (): Stream.Stream<RawLogEntry> =>
  Stream.fromIterable([
    { timestamp: "2025-12-17T09:00:00Z", level: "DEBUG", message: "App starting" },
    { timestamp: "2025-12-17T09:01:00Z", level: "INFO", message: "Connected to DB" },
    { timestamp: "2025-12-17T09:02:00Z", level: "ERROR", message: "Query timeout" },
    { timestamp: "2025-12-17T09:03:00Z", level: "DEBUG", message: "Retry initiated" },
    { timestamp: "2025-12-17T09:04:00Z", level: "WARN", message: "Connection degraded" },
    { timestamp: "2025-12-17T09:05:00Z", level: "INFO", message: "Recovered" },
  ]);

// Transform: Map log level to severity
const mapSeverity = (level: string): "low" | "medium" | "high" => {
  if (level === "DEBUG" || level === "INFO") return "low";
  if (level === "WARN") return "medium";
  return "high";
};

// Transform: Normalize message
const normalizeMessage = (message: string): string =>
  message.toLowerCase().trim();

// Filter: Keep only important logs
const isImportant = (entry: RawLogEntry): boolean => entry.level !== "DEBUG";

// Main pipeline
const program = Effect.gen(function* () {
  console.log(`\n[STREAM] Processing log stream with map/filter\n`);

  // Create and transform stream
  const transformedStream = createLogStream().pipe(
    // Filter: Keep only non-debug logs
    Stream.filter((entry) => {
      const important = isImportant(entry);
      console.log(
        `[FILTER] ${entry.level} → ${important ? "✓ kept" : "✗ filtered out"}`
      );
      return important;
    }),

    // Map: Parse the ISO timestamp into a Date
    Stream.map((entry) => {
      const date = new Date(entry.timestamp);
      console.log(`[MAP-1] Parsed date: ${date.toISOString()}`);
      return { ...entry, parsedDate: date };
    }),

    // Map: Normalize and map severity
    Stream.map((entry) => {
      const processed: ProcessedLog = {
        date: entry.parsedDate,
        severity: mapSeverity(entry.level),
        normalizedMessage: normalizeMessage(entry.message),
      };
      console.log(
        `[MAP-2] Transformed: ${entry.level} → ${processed.severity}`
      );
      return processed;
    })
  );

  // Collect all transformed logs
  const results = yield* transformedStream.pipe(Stream.runCollect);

  console.log(`\n[RESULTS]`);
  console.log(`  Total logs: ${results.length}`);

  for (const log of Chunk.toReadonlyArray(results)) {
    console.log(
      `  - [${log.severity.toUpperCase()}] ${log.date.toISOString()}: ${log.normalizedMessage}`
    );
  }
});

Effect.runPromise(program);
```

Output shows lazy evaluation and filtering:

```
[STREAM] Processing log stream with map/filter

[FILTER] DEBUG → ✗ filtered out
[FILTER] INFO → ✓ kept
[MAP-1] Parsed date: 2025-12-17T09:01:00.000Z
[MAP-2] Transformed: INFO → low
[FILTER] ERROR → ✓ kept
[MAP-1] Parsed date: 2025-12-17T09:02:00.000Z
[MAP-2] Transformed: ERROR → high
...

[RESULTS]
  Total logs: 4
  - [LOW] 2025-12-17T09:01:00.000Z: connected to db
  - [HIGH] 2025-12-17T09:02:00.000Z: query timeout
...
```

---

**Rationale:**

Use `Stream.map` and `Stream.filter` to transform streams:

- **map**: Transform each element (1→1)
- **filter**: Keep elements matching a predicate (N→M, discarding the rest)
- **Chain**: Compose multiple operations
- **Lazy**: Elements are transformed on demand (no buffering)

Pattern: `stream.pipe(Stream.map(...), Stream.filter(...))`

---

Streaming data transformations without map/filter create problems:

- **Buffering**: Must collect all data before transforming
- **Code verbosity**: Manual loops for each transformation
- **Memory usage**: Large intermediate arrays
- **Composability**: Hard to chain operations

Map/filter enable:

- **Lazy evaluation**: Transform on demand
- **Composable**: Chain operations naturally
- **Memory efficient**: No intermediate collections
- **Expressive**: Declare intent clearly

Real-world example: Processing logs
- **Without map/filter**: Collect logs, filter by level, map to objects, transform fields
- **With map/filter**: `logStream.pipe(Stream.filter(...), Stream.map(...))` (a fused variant is sketched below)
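
When a filter and a map always travel together, the two steps can be fused into one pass. A minimal sketch, assuming `Stream.filterMap` (which takes a function returning `Option`) is available in your Effect version; the parsing logic is illustrative:

```typescript
import { Stream, Effect, Option, Chunk } from "effect";

// Parse numeric strings, silently dropping anything that isn't a number.
// Returning Option.none() filters the element out; Option.some(b) maps it.
const parsed = Stream.fromIterable(["1", "two", "3"]).pipe(
  Stream.filterMap((raw) => {
    const n = Number(raw);
    return Number.isNaN(n) ? Option.none() : Option.some(n * 10);
  })
);

Effect.runPromise(Stream.runCollect(parsed)).then((chunk) =>
  console.log(Chunk.toReadonlyArray(chunk)) // [10, 30]
);
```
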
---

---

## 🟡 Intermediate Patterns

### Stream Pattern 2: Merge and Combine Multiple Streams

**Rule:** Use merge and concat combinators to combine multiple streams, enabling aggregation of data from multiple independent sources.

**Good Example:**

This example demonstrates merging multiple event streams into a unified stream.

```typescript
import { Stream, Effect, Chunk } from "effect";

interface Event {
  readonly source: string;
  readonly type: string;
  readonly data: string;
  readonly timestamp: Date;
}

// Create independent event streams from different sources
const createUserEventStream = (): Stream.Stream<Event> =>
  Stream.fromIterable([
    { source: "user-service", type: "login", data: "user-123", timestamp: new Date(Date.now() + 0) },
    { source: "user-service", type: "logout", data: "user-123", timestamp: new Date(Date.now() + 500) },
  ]).pipe(
    Stream.tap(() => Effect.sleep("500 millis"))
  );

const createPaymentEventStream = (): Stream.Stream<Event> =>
  Stream.fromIterable([
    { source: "payment-service", type: "payment-started", data: "order-456", timestamp: new Date(Date.now() + 200) },
    { source: "payment-service", type: "payment-completed", data: "order-456", timestamp: new Date(Date.now() + 800) },
  ]).pipe(
    Stream.tap(() => Effect.sleep("600 millis"))
  );

const createAuditEventStream = (): Stream.Stream<Event> =>
  Stream.fromIterable([
    { source: "audit-log", type: "access-granted", data: "resource-789", timestamp: new Date(Date.now() + 100) },
    { source: "audit-log", type: "access-revoked", data: "resource-789", timestamp: new Date(Date.now() + 900) },
  ]).pipe(
    Stream.tap(() => Effect.sleep("800 millis"))
  );

// Merge streams (interleaved, unordered).
// Stream.merge is binary, so chain it to combine more than two sources.
const mergedEventStream = (): Stream.Stream<Event> =>
  createUserEventStream().pipe(
    Stream.merge(createPaymentEventStream()),
    Stream.merge(createAuditEventStream())
  );

// Concat streams (sequential, ordered)
const concatenatedEventStream = (): Stream.Stream<Event> =>
  createUserEventStream().pipe(
    Stream.concat(createPaymentEventStream()),
    Stream.concat(createAuditEventStream())
  );

// Main: Compare merge vs concat
const program = Effect.gen(function* () {
  console.log(`\n[MERGE] Interleaved events from multiple sources:\n`);

  // Collect merged stream
  const mergedEvents = yield* mergedEventStream().pipe(Stream.runCollect);

  Chunk.toReadonlyArray(mergedEvents).forEach((event, idx) => {
    console.log(`  ${idx + 1}. [${event.source}] ${event.type}: ${event.data}`);
  });

  console.log(`\n[CONCAT] Sequential events (user → payment → audit):\n`);

  // Collect concatenated stream
  const concatEvents = yield* concatenatedEventStream().pipe(Stream.runCollect);

  Chunk.toReadonlyArray(concatEvents).forEach((event, idx) => {
    console.log(`  ${idx + 1}. [${event.source}] ${event.type}: ${event.data}`);
  });
});

Effect.runPromise(program);
```

Output shows merge interleaving vs concat ordering:

```
[MERGE] Interleaved events from multiple sources:

  1. [audit-log] access-granted: resource-789
  2. [user-service] login: user-123
  3. [payment-service] payment-started: order-456
  4. [user-service] logout: user-123
  5. [payment-service] payment-completed: order-456
  6. [audit-log] access-revoked: resource-789

[CONCAT] Sequential events (user → payment → audit):

  1. [user-service] login: user-123
  2. [user-service] logout: user-123
  3. [payment-service] payment-started: order-456
  4. [payment-service] payment-completed: order-456
  5. [audit-log] access-granted: resource-789
  6. [audit-log] access-revoked: resource-789
```

---

**Rationale:**

Combine multiple streams using:

- **merge**: Interleave elements from two streams (unordered); chain it for more sources
- **concat**: Chain streams sequentially (ordered, waits for the first to complete)
- **mergeAll**: Merge a whole collection of streams
- **zip**: Combine corresponding elements from two streams

Pattern: `Stream.merge(stream1, stream2)` or `stream1.pipe(Stream.concat(stream2))`

---

Multi-source data processing without merge/concat creates issues:

- **Complex coordination**: Manual loop over multiple sources
- **Hard to aggregate**: Collecting from different sources is verbose
- **Ordering confusion**: Sequential vs. parallel unclear
- **Resource management**: Multiple independent consumers

Merge/concat enable:

- **Simple composition**: Combine streams naturally
- **Semantic clarity**: Merge = parallel, concat = sequential
- **Aggregation**: Single consumer for multiple sources
- **Scalability**: Add sources without refactoring

Real-world example: Aggregating user events
- **Without merge**: Poll user service, poll event log, poll notifications separately
- **With merge**: `userStream.pipe(Stream.merge(eventStream), Stream.merge(notificationStream))` (`zip` and `mergeAll` are sketched below)

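The `zip` and `mergeAll` combinators mentioned above deserve a quick look of their own. A minimal sketch; `Stream.zip` pairs corresponding elements, and the options-object form of `Stream.mergeAll` is assumed to match current Effect releases:

```typescript
import { Stream, Effect, Chunk } from "effect";

const numbers = Stream.fromIterable([1, 2, 3]);
const letters = Stream.fromIterable(["a", "b", "c"]);

// zip: pair corresponding elements; ends when the shorter stream ends
const zipped = Stream.zip(numbers, letters); // [1, "a"], [2, "b"], [3, "c"]

// mergeAll: merge a collection of streams with bounded concurrency
// (assumption: mergeAll takes the streams plus a { concurrency } option)
const merged = Stream.mergeAll([numbers, Stream.fromIterable([10, 20])], {
  concurrency: 2,
});

const main = Effect.gen(function* () {
  console.log(Chunk.toReadonlyArray(yield* Stream.runCollect(zipped)));
  console.log(Chunk.toReadonlyArray(yield* Stream.runCollect(merged)));
});

Effect.runPromise(main);
```
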
+
336
+ ---
337
+
338
+ ### Stream Pattern 3: Control Backpressure in Streams
339
+
340
+ **Rule:** Use backpressure control to manage flow between fast producers and slow consumers, preventing memory exhaustion and resource overflow.
341
+
342
+ **Good Example:**
343
+
344
+ This example demonstrates managing backpressure when consuming events at different rates.
345
+
346
```typescript
import { Stream, Effect } from "effect";

interface DataPoint {
  readonly id: number;
  readonly value: number;
}

// Fast producer: generates ~100 items per second
const fastProducer = (): Stream.Stream<DataPoint> =>
  Stream.fromIterable(
    Array.from({ length: 100 }, (_, i) => ({ id: i, value: Math.random() }))
  ).pipe(
    Stream.tap(() => Effect.sleep("10 millis")) // 10ms per item = 100/sec
  );

// Slow consumer: processes ~10 items per second
const slowConsumer = (_item: DataPoint): Effect.Effect<void> =>
  Effect.sleep("100 millis"); // 100ms per item = 10/sec

// Without backpressure control (DANGEROUS - the queue grows unbounded)
const unbufferedStream = (): Stream.Stream<DataPoint> =>
  fastProducer().pipe(
    Stream.tap((item) => Effect.log(`[UNBUFFERED] Produced item ${item.id}`))
  );

// With a bounded buffer (backpressure kicks in when the buffer is full)
const bufferedStream = (bufferSize: number): Stream.Stream<DataPoint> =>
  fastProducer().pipe(
    // Buffer at most `bufferSize` items; if full, the producer waits
    Stream.buffer({ capacity: bufferSize }),
    Stream.tap((item) => Effect.log(`[BUFFERED] Consumed item ${item.id}`))
  );

// With throttling (rate-limit emission)
const throttledStream = (): Stream.Stream<DataPoint> =>
  fastProducer().pipe(
    // Emit at most 1 item per 50ms (20/sec): each element costs 1 unit,
    // and 1 unit is replenished every 50 millis
    Stream.throttle({
      cost: (chunk) => chunk.length,
      units: 1,
      duration: "50 millis",
    }),
    Stream.tap((item) => Effect.log(`[THROTTLED] Item ${item.id}`))
  );

// Main: compare approaches
const program = Effect.gen(function* () {
  console.log(`\n[START] Demonstrating backpressure management\n`);

  // Test buffered approach
  console.log(`[TEST 1] Buffered stream (buffer size 5):\n`);

  const startBuffer = Date.now();

  yield* bufferedStream(5).pipe(
    Stream.take(20), // Take only 20 items
    Stream.runForEach(slowConsumer)
  );

  const bufferTime = Date.now() - startBuffer;
  console.log(`\n[RESULT] Buffered approach took ${bufferTime}ms\n`);

  // Test throttled approach
  console.log(`[TEST 2] Throttled stream (1 item per 50ms):\n`);

  const startThrottle = Date.now();

  yield* throttledStream().pipe(
    Stream.take(20),
    Stream.runForEach(slowConsumer)
  );

  const throttleTime = Date.now() - startThrottle;
  console.log(`\n[RESULT] Throttled approach took ${throttleTime}ms\n`);

  // Summary
  console.log(`[SUMMARY]`);
  console.log(`  Without backpressure control:`);
  console.log(`    - Queue would grow toward 100 items (memory risk)`);
  console.log(`    - Producer/consumer operate independently`);
  console.log(`  With buffering:`);
  console.log(`    - Queue bounded to 5 items (safe)`);
  console.log(`    - Producer waits when buffer full`);
  console.log(`  With throttling:`);
  console.log(`    - Production rate limited to 20/sec`);
  console.log(`    - Smooth controlled flow`);
});

Effect.runPromise(program);
```

---

**Rationale:**

Backpressure is flow control: the slow consumer tells the fast producer to slow down.

Techniques:
- **Buffering**: Store items temporarily (bounded queue)
- **Throttling**: Rate-limit item emission
- **Chunking**: Process in fixed-size batches
- **Debouncing**: Skip rapid duplicates

Pattern: `stream.pipe(Stream.throttle(...), Stream.buffer(...))`

---

Without backpressure management, mismatched producer/consumer speeds cause:

- **Memory exhaustion**: Producer faster than consumer → queue grows unbounded
- **Garbage collection pauses**: Large buffers cause GC pressure
- **Resource leaks**: Open connections/file handles accumulate
- **Cascade failures**: One slow consumer blocks the entire pipeline

Backpressure management enables:

- **Memory safety**: Bounded buffers prevent overflow
- **Resource efficiency**: Consumers pace producers naturally
- **Performance**: Tuning buffer sizes improves throughput
- **Observability**: Monitor backpressure as a health indicator

Real-world example: Reading a large file vs. writing to a database
- **No backpressure**: Read entire file into memory, write slowly → memory exhaustion
- **With backpressure**: Read 1000 lines, wait for the database, read the next batch (batching and debouncing are sketched below)

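The chunking and debouncing techniques from the rationale map directly onto built-in combinators. A minimal sketch, assuming `Stream.grouped` and `Stream.debounce` as in current Effect releases; the batch size and timings are arbitrary:

```typescript
import { Stream, Effect, Chunk } from "effect";

// Batch elements for bulk writes: 10 per Chunk
const batched = Stream.fromIterable(
  Array.from({ length: 25 }, (_, i) => i)
).pipe(
  Stream.grouped(10), // Stream of Chunks with up to 10 elements each
  Stream.mapEffect((batch) => Effect.log(`writing batch of ${batch.length}`))
);

// Drop any element that is followed by another within 50 ms
const debounced = Stream.fromIterable(["a", "ab", "abc"]).pipe(
  Stream.debounce("50 millis")
);

const main = Effect.gen(function* () {
  yield* Stream.runDrain(batched);
  const kept = yield* Stream.runCollect(debounced);
  console.log(Chunk.toReadonlyArray(kept)); // likely ["abc"]
});

Effect.runPromise(main);
```
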
---

---

### Stream Pattern 4: Stateful Operations with Scan and Fold

**Rule:** Use scan for stateful element-by-element processing and runFold for final aggregation, enabling complex stream analytics without buffering the entire stream.

**Good Example:**

This example demonstrates maintaining statistics across a stream of measurements.

```typescript
import { Stream, Effect, Chunk } from "effect";

interface Measurement {
  readonly id: number;
  readonly value: number;
  readonly timestamp: Date;
}

interface RunningStats {
  readonly count: number;
  readonly sum: number;
  readonly min: number;
  readonly max: number;
  readonly average: number;
  readonly variance: number;
  readonly lastValue: number;
}

// Create a stream of measurements
const createMeasurementStream = (): Stream.Stream<Measurement> =>
  Stream.fromIterable([
    { id: 1, value: 10, timestamp: new Date() },
    { id: 2, value: 20, timestamp: new Date() },
    { id: 3, value: 15, timestamp: new Date() },
    { id: 4, value: 25, timestamp: new Date() },
    { id: 5, value: 30, timestamp: new Date() },
    { id: 6, value: 22, timestamp: new Date() },
  ]);

// Initial statistics state
const initialStats: RunningStats = {
  count: 0,
  sum: 0,
  min: Infinity,
  max: -Infinity,
  average: 0,
  variance: 0,
  lastValue: 0,
};

// Reducer: update stats for each measurement (Welford's online algorithm)
const updateStats = (
  stats: RunningStats,
  measurement: Measurement
): RunningStats => {
  const newCount = stats.count + 1;
  const newSum = stats.sum + measurement.value;
  const newAverage = newSum / newCount;

  // Recover the running sum of squared deviations (M2) from the stored
  // variance, update it incrementally, then re-normalize by the new count
  const delta = measurement.value - stats.average;
  const delta2 = measurement.value - newAverage;
  const newM2 = stats.variance * stats.count + delta * delta2;

  return {
    count: newCount,
    sum: newSum,
    min: Math.min(stats.min, measurement.value),
    max: Math.max(stats.max, measurement.value),
    average: newAverage,
    variance: newM2 / newCount,
    lastValue: measurement.value,
  };
};

// Main: demonstrate scan with statistics
const program = Effect.gen(function* () {
  console.log(`\n[SCAN] Running statistics stream:\n`);

  // Use scan to emit intermediate statistics
  const statsStream = createMeasurementStream().pipe(
    Stream.scan(initialStats, (stats, measurement) => {
      const newStats = updateStats(stats, measurement);

      console.log(
        `[MEASUREMENT ${measurement.id}] Value: ${measurement.value}`
      );
      console.log(
        `  Count: ${newStats.count}, Avg: ${newStats.average.toFixed(2)}, ` +
          `Min: ${newStats.min}, Max: ${newStats.max}, ` +
          `Variance: ${newStats.variance.toFixed(2)}`
      );

      return newStats;
    })
  );

  // Collect all intermediate stats (scan also emits the initial state)
  const allStats = yield* statsStream.pipe(Stream.runCollect);

  // Final statistics
  const finalStats = Chunk.last(allStats);

  if (finalStats._tag === "Some") {
    console.log(`\n[FINAL STATISTICS]`);
    console.log(`  Total measurements: ${finalStats.value.count}`);
    console.log(`  Average: ${finalStats.value.average.toFixed(2)}`);
    console.log(`  Min: ${finalStats.value.min}`);
    console.log(`  Max: ${finalStats.value.max}`);
    console.log(
      `  Std Dev: ${Math.sqrt(finalStats.value.variance).toFixed(2)}`
    );
  }

  // Compare with runFold (produces only the final result, as an Effect)
  console.log(`\n[FOLD] Final statistics only:\n`);

  const finalResult = yield* createMeasurementStream().pipe(
    Stream.runFold(initialStats, updateStats)
  );

  yield* Effect.log(
    `Final: Count=${finalResult.count}, Avg=${finalResult.average.toFixed(2)}`
  );
});

Effect.runPromise(program);
```

---

**Rationale:**

Stateful stream operations:

- **scan**: Apply a reducer with an accumulator, emitting every intermediate state
- **runFold**: Apply a reducer with an accumulator, returning only the final state as an `Effect`
- **scanEffect / runFoldEffect**: The same, with effectful reducers

Pattern: `stream.pipe(Stream.scan(initialState, reducer))` or `stream.pipe(Stream.runFold(initialState, reducer))`

---

Processing streams without scan/fold creates issues:

- **Manual state tracking**: Ref or mutable variables outside the stream
- **Lost context**: Hard to correlate intermediate values
- **Error-prone**: Easy to forget state updates
- **Testing difficulty**: State spread across code

Scan/fold enable:

- **Declarative state**: State threaded through the stream
- **Intermediate values**: Emit state at each step (scan)
- **Type-safe**: Accumulator type guaranteed
- **Composable**: Chain stateful operations

Real-world example: Running average of metrics
- **Without scan**: Track count and sum manually, calculate average, emit
- **With scan**: `stream.pipe(Stream.scan(initialState, updateAverage))` (see the sketch below)

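The running-average example reads almost verbatim as a scan. A minimal sketch; note that `Stream.scan` also emits the seed state, which is dropped here:

```typescript
import { Stream, Effect, Chunk } from "effect";

interface Avg {
  readonly count: number;
  readonly sum: number;
  readonly average: number;
}

const initial: Avg = { count: 0, sum: 0, average: 0 };

// Each emitted element is the running average after that measurement
const runningAverage = Stream.fromIterable([10, 20, 15, 25]).pipe(
  Stream.scan(initial, (state, value) => {
    const count = state.count + 1;
    const sum = state.sum + value;
    return { count, sum, average: sum / count };
  }),
  Stream.drop(1) // drop the seed state that scan emits first
);

Effect.runPromise(Stream.runCollect(runningAverage)).then((chunk) =>
  console.log(Chunk.toReadonlyArray(chunk).map((s) => s.average)) // [10, 15, 15, 17.5]
);
```
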
---

---

## 🟠 Advanced Patterns

### Stream Pattern 5: Grouping and Windowing Streams

**Rule:** Use groupBy to partition streams by key and tumbling/sliding windows to aggregate streams over time windows.

**Good Example:**

This example demonstrates windowing and grouping patterns with plain arrays, so every window boundary is explicit; the stream-native combinators are sketched after the rationale.

```typescript
import { Effect } from "effect";

interface Event {
  readonly timestamp: Date;
  readonly userId: string;
  readonly action: string;
  readonly duration: number; // milliseconds
}

// Simulate an event stream as a plain array
const generateEvents = (): Event[] => [
  { timestamp: new Date(Date.now() - 5000), userId: "user1", action: "click", duration: 100 },
  { timestamp: new Date(Date.now() - 4500), userId: "user2", action: "view", duration: 250 },
  { timestamp: new Date(Date.now() - 4000), userId: "user1", action: "scroll", duration: 150 },
  { timestamp: new Date(Date.now() - 3500), userId: "user3", action: "click", duration: 120 },
  { timestamp: new Date(Date.now() - 3000), userId: "user2", action: "click", duration: 180 },
  { timestamp: new Date(Date.now() - 2500), userId: "user1", action: "view", duration: 200 },
  { timestamp: new Date(Date.now() - 2000), userId: "user3", action: "view", duration: 300 },
  { timestamp: new Date(Date.now() - 1500), userId: "user1", action: "submit", duration: 500 },
  { timestamp: new Date(Date.now() - 1000), userId: "user2", action: "scroll", duration: 100 },
];

// Main: windowing and grouping examples
const program = Effect.gen(function* () {
  console.log(`\n[WINDOWING & GROUPING] Stream organization patterns\n`);

  const events = generateEvents();

  // Example 1: Tumbling window (fixed-size batches)
  console.log(`[1] Tumbling window (2-event batches):\n`);

  const windowSize = 2;
  let batchNumber = 1;

  for (let i = 0; i < events.length; i += windowSize) {
    const batch = events.slice(i, i + windowSize);

    yield* Effect.log(`[WINDOW ${batchNumber}] (${batch.length} events)`);

    let totalDuration = 0;

    for (const event of batch) {
      yield* Effect.log(
        `  - ${event.userId}: ${event.action} (${event.duration}ms)`
      );
      totalDuration += event.duration;
    }

    yield* Effect.log(`[WINDOW ${batchNumber}] Total duration: ${totalDuration}ms\n`);

    batchNumber++;
  }

  // Example 2: Sliding window (overlapping)
  console.log(`[2] Sliding window (last 3 events, slide by 1):\n`);

  const windowSizeSlide = 3;
  const slideBy = 1;

  for (let i = 0; i <= events.length - windowSizeSlide; i += slideBy) {
    const window = events.slice(i, i + windowSizeSlide);

    const avgDuration =
      window.reduce((sum, e) => sum + e.duration, 0) / window.length;

    yield* Effect.log(
      `[SLIDE ${i / slideBy}] ${window.length} events, avg duration: ${avgDuration.toFixed(0)}ms`
    );
  }

  // Example 3: Group by key
  console.log(`\n[3] Group by user:\n`);

  const byUser = new Map<string, Event[]>();

  for (const event of events) {
    if (!byUser.has(event.userId)) {
      byUser.set(event.userId, []);
    }
    byUser.get(event.userId)!.push(event);
  }

  for (const [userId, userEvents] of byUser) {
    const totalActions = userEvents.length;
    const totalTime = userEvents.reduce((sum, e) => sum + e.duration, 0);
    const avgTime = totalTime / totalActions;

    yield* Effect.log(
      `[USER ${userId}] ${totalActions} actions, ${totalTime}ms total, ${avgTime.toFixed(0)}ms avg`
    );
  }

  // Example 4: Group + Window combination
  console.log(`\n[4] Group by user, window by action type:\n`);

  for (const [userId, userEvents] of byUser) {
    const byAction = new Map<string, Event[]>();

    for (const event of userEvents) {
      if (!byAction.has(event.action)) {
        byAction.set(event.action, []);
      }
      byAction.get(event.action)!.push(event);
    }

    yield* Effect.log(`[USER ${userId}] Action breakdown:`);

    for (const [action, actionEvents] of byAction) {
      const count = actionEvents.length;
      const total = actionEvents.reduce((sum, e) => sum + e.duration, 0);

      yield* Effect.log(`  ${action}: ${count}x (${total}ms total)`);
    }
  }

  // Example 5: Session window (based on inactivity timeout)
  console.log(`\n[5] Session window (gap > 1000ms = new session):\n`);

  const sessionGapMs = 1000;
  const sessions: Event[][] = [];
  let currentSession: Event[] = [];
  let lastTimestamp = events[0]?.timestamp.getTime() ?? 0;

  for (const event of events) {
    const currentTime = event.timestamp.getTime();
    const timeSinceLastEvent = currentTime - lastTimestamp;

    if (timeSinceLastEvent > sessionGapMs && currentSession.length > 0) {
      sessions.push(currentSession);
      yield* Effect.log(
        `[SESSION] Closed (${currentSession.length} events, gap: ${timeSinceLastEvent}ms)`
      );
      currentSession = [];
    }

    currentSession.push(event);
    lastTimestamp = currentTime;
  }

  if (currentSession.length > 0) {
    sessions.push(currentSession);
    yield* Effect.log(`[SESSION] Final (${currentSession.length} events)`);
  }

  // Example 6: Top-K aggregation in a window
  console.log(`\n[6] Top 2 actions in last window:\n`);

  const lastWindow = events.slice(-3);

  const actionCounts = new Map<string, number>();

  for (const event of lastWindow) {
    actionCounts.set(event.action, (actionCounts.get(event.action) ?? 0) + 1);
  }

  const topActions = Array.from(actionCounts.entries())
    .sort((a, b) => b[1] - a[1])
    .slice(0, 2);

  yield* Effect.log(`[TOP-K] In last window of 3 events:`);

  for (const [action, count] of topActions) {
    yield* Effect.log(`  ${action}: ${count}x`);
  }
});

Effect.runPromise(program);
```

---

**Rationale:**

Windowing organizes unbounded streams into bounded chunks:

- **Tumbling window**: Fixed-size, non-overlapping (e.g., 1-sec windows)
- **Sliding window**: Overlapping windows (e.g., 10-sec window, 5-sec hop)
- **Group by key**: Partition the stream by a field value
- **Session window**: Event-based windows (e.g., idle timeout)
- **Batch aggregation**: Process N items or wait T seconds

Pattern: `Stream.groupBy()`, custom windowing with `Ref` and `Schedule`

---

Unbounded streams need boundaries:

**Problem 1: Memory exhaustion**
- Processing 1M events with no boundary = keep all in memory
- Cumulative memory grows unbounded
- Eventually an OOM error

**Problem 2: Late aggregation**
- Can't sum all events until the stream ends (never)
- Need to decide: "sum events in this 1-second window"

**Problem 3: Grouping complexity**
- Stream of user events: need per-user aggregation
- Without groupBy: manual state tracking (error-prone)

**Problem 4: Temporal patterns**
- "Top 10 searches in the last 5 minutes" requires windowing
- "Average response time per endpoint per minute" requires grouping + windowing

Solutions:

**Tumbling window**:
- Divide the stream into 1-sec, 5-sec, or 1-min chunks
- Process each chunk independently
- Clear memory between windows
- Natural for: metrics, batching, reports

**Sliding window**:
- Keep the last 5 minutes of data at all times
- Emit an updated aggregation every second
- Detect patterns over overlapping periods
- Natural for: anomaly detection, trends

**Group by**:
- Separate streams by key
- Each key has independent state
- Emit grouped results
- Natural for: per-user, per-endpoint, per-tenant

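The same windows can be expressed directly on a `Stream`, without the manual loops used in the example above. A minimal sketch, assuming `Stream.grouped` (tumbling) and `Stream.sliding` (overlapping) as in current Effect releases; per-key partitioning is also available via `Stream.groupByKey`, not shown here:

```typescript
import { Stream, Effect, Chunk } from "effect";

const values = Stream.fromIterable([1, 2, 3, 4, 5, 6, 7, 8, 9]);

const main = Effect.gen(function* () {
  // Tumbling window: non-overlapping batches of 3
  const tumbling = yield* values.pipe(Stream.grouped(3), Stream.runCollect);
  console.log(Chunk.toReadonlyArray(tumbling).map(Chunk.toReadonlyArray));
  // [[1,2,3],[4,5,6],[7,8,9]]

  // Sliding window: overlapping windows of 3, advancing by 1
  const sliding = yield* values.pipe(Stream.sliding(3), Stream.runCollect);
  console.log(Chunk.toReadonlyArray(sliding).map(Chunk.toReadonlyArray));
  // [[1,2,3],[2,3,4],[3,4,5], ...]
});

Effect.runPromise(main);
```
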
---

---

### Stream Pattern 6: Resource Management in Streams

**Rule:** Use `Stream.acquireRelease` or effect scoping to guarantee resource cleanup, preventing leaks even when streams fail or are interrupted.

**Good Example:**

This example demonstrates resource acquisition, use, and guaranteed cleanup.

```typescript
import { Effect, Stream, Ref, Schedule } from "effect";

interface FileHandle {
  readonly path: string;
  readonly fd: number;
}

interface Connection {
  readonly id: string;
  readonly isOpen: boolean;
}

// Simulate resource management
const program = Effect.gen(function* () {
  console.log(`\n[RESOURCE MANAGEMENT] Stream resource lifecycle\n`);

  // Example 1: Bracket pattern for file streams
  console.log(`[1] Bracket pattern (acquire → use → release):\n`);

  let openHandles = 0;
  let closedHandles = 0;

  const openFile = (path: string): Effect.Effect<FileHandle> =>
    Effect.gen(function* () {
      openHandles++;
      yield* Effect.log(`[OPEN] File "${path}" (total open: ${openHandles})`);
      return { path, fd: 1000 + openHandles };
    });

  const closeFile = (handle: FileHandle) =>
    Effect.gen(function* () {
      closedHandles++;
      yield* Effect.log(`[CLOSE] File "${handle.path}" (total closed: ${closedHandles})`);
    });

  // acquireUseRelease runs the release step on success, failure, or interruption
  const readFileWithBracket = (path: string) =>
    Effect.acquireUseRelease(
      openFile(path),
      (handle) =>
        Effect.gen(function* () {
          yield* Effect.log(`[USE] Reading from fd ${handle.fd} ("${handle.path}")`);
          // Simulate reading
          return "file contents";
        }),
      (handle) => closeFile(handle)
    );

  // Test with success
  yield* Effect.log(`[TEST] Success case:`);
  const content = yield* readFileWithBracket("/data/file.txt");
  yield* Effect.log(`[RESULT] Got: "${content}"\n`);

  // Test with failure: the handle is still closed before the error is handled
  yield* Effect.log(`[TEST] Error case:`);

  const failCase = Effect.acquireUseRelease(
    openFile("/data/missing.txt"),
    () => Effect.fail(new Error("Read failed")), // Simulate error mid-operation
    (handle) => closeFile(handle)
  ).pipe(
    Effect.catchAll((error) =>
      Effect.gen(function* () {
        yield* Effect.log(`[ERROR] Caught: ${error.message}`);
        yield* Effect.log(`[CHECK] Closed handles: ${closedHandles} (verifying cleanup)\n`);
      })
    )
  );

  yield* failCase;

  // Example 2: Connection pool management
  console.log(`[2] Connection pooling:\n`);

  interface ConnectionPool {
    readonly acquire: () => Effect.Effect<Connection, Error>;
    readonly release: (conn: Connection) => Effect.Effect<void>;
  }

  const createConnectionPool = (maxSize: number): Effect.Effect<ConnectionPool> =>
    Effect.gen(function* () {
      const available = yield* Ref.make<Connection[]>([]);
      const inUse = yield* Ref.make<Set<string>>(new Set());
      let idCounter = 0;

      const acquire = () =>
        Effect.gen(function* () {
          const avail = yield* Ref.get(available);

          if (avail.length > 0) {
            yield* Effect.log(`[POOL] Reusing connection from pool`);
            const conn = avail[avail.length - 1];
            yield* Ref.set(available, avail.slice(0, -1));
            yield* Ref.update(inUse, (set) => new Set(set).add(conn.id));
            return conn;
          }

          const inUseCount = (yield* Ref.get(inUse)).size;
          if (inUseCount >= maxSize) {
            return yield* Effect.fail(new Error("Pool exhausted"));
          }

          const connId = `conn-${++idCounter}`;
          yield* Effect.log(`[POOL] Creating new connection: ${connId}`);
          const conn: Connection = { id: connId, isOpen: true };
          yield* Ref.update(inUse, (set) => new Set(set).add(connId));
          return conn;
        });

      const release = (conn: Connection) =>
        Effect.gen(function* () {
          yield* Ref.update(inUse, (set) => {
            const updated = new Set(set);
            updated.delete(conn.id);
            return updated;
          });
          yield* Ref.update(available, (avail) => [...avail, conn]);
          yield* Effect.log(`[POOL] Returned connection: ${conn.id}`);
        });

      return { acquire, release };
    });

  const pool = yield* createConnectionPool(3);

  // Acquire and release connections
  const conn1 = yield* pool.acquire();
  const conn2 = yield* pool.acquire();

  yield* pool.release(conn1);

  const conn3 = yield* pool.acquire(); // Reuses conn1

  yield* Effect.log(`[POOL] Holding ${conn2.id} and ${conn3.id}\n`);

  // Example 3: Scope-based resource safety
  console.log(`[3] Scoped resources (hierarchical cleanup):\n`);

  let scopedCount = 0;

  const withScoped = <A, E, R>(create: () => Effect.Effect<A, E, R>) =>
    Effect.acquireUseRelease(
      Effect.sync(() => ++scopedCount).pipe(
        Effect.tap((id) => Effect.log(`[SCOPE] Enter scope ${id}`))
      ),
      (id) =>
        Effect.gen(function* () {
          yield* Effect.log(`[SCOPE] Using resource in scope ${id}`);
          return yield* create();
        }),
      // Cleanup is guaranteed to run when the scope exits, even on failure
      (id) => Effect.log(`[SCOPE] Cleanup guaranteed for scope ${id}`)
    );

  // Nested scopes: the inner scope opens and closes inside the outer one,
  // so cleanup runs inner → outer
  const result = yield* withScoped(() =>
    withScoped(() => Effect.succeed("inner data")).pipe(
      Effect.map((data) => ({ level: 1, data }))
    )
  );

  yield* Effect.log(`[SCOPES] Got "${result.data}"; cleanup order: inner → outer\n`);

  // Example 4: Stream resource management
  console.log(`[4] Stream with resource cleanup:\n`);

  let streamResourceCount = 0;

  // Simulate a stream that acquires a resource per element
  const streamWithResources = Stream.make(1, 2, 3).pipe(
    Stream.tap(() =>
      Effect.suspend(() => {
        streamResourceCount++;
        return Effect.log(`[STREAM-RES] Acquired resource ${streamResourceCount}`);
      })
    ),
    // Cleanup when the stream ends; suspend so the final count is read then
    Stream.ensuring(
      Effect.suspend(() =>
        Effect.log(`[STREAM-RES] Cleaning up all ${streamResourceCount} resources`)
      )
    )
  );

  yield* Stream.runDrain(streamWithResources);

  // Example 5: Error propagation with cleanup
  console.log(`\n[5] Error safety with cleanup:\n`);

  const safeRead = (attempt: number) =>
    Effect.acquireUseRelease(
      openFile(`/data/file-${attempt}.txt`),
      () =>
        Effect.gen(function* () {
          if (attempt < 2) {
            yield* Effect.log(`[READ] Attempt ${attempt}: failing intentionally`);
            return yield* Effect.fail(new Error(`Attempt ${attempt} failed`));
          }
          yield* Effect.log(`[READ] Success on attempt ${attempt}`);
          return "success";
        }),
      (handle) => closeFile(handle)
    );

  // Retry with guaranteed cleanup: the handle is closed on every attempt
  const result2 = yield* safeRead(1).pipe(
    Effect.retry(Schedule.recurs(2).pipe(Schedule.addDelay(() => "10 millis"))),
    Effect.catchAll((error) =>
      Effect.gen(function* () {
        yield* Effect.log(`[FINAL] All retries failed: ${error.message}`);
        return "fallback";
      })
    )
  );

  yield* Effect.log(`\n[FINAL] Result: ${result2}`);
});

Effect.runPromise(program);
```

---

**Rationale:**

Streams must clean up resources deterministically:

- **Acquire/Release**: Get the resource, use it, return it
- **Bracket pattern**: Ensure cleanup on success or failure
- **Scope safety**: Guarantee cleanup even on exceptions
- **Connection pooling**: Reuse connections, prevent exhaustion
- **Concurrent cleanup**: Handle cleanup under concurrency

Pattern: `Stream.acquireRelease()`, `Effect.acquireRelease()`, `Scope` for resource safety

---

Streams without resource management cause problems:

**Problem 1: Resource exhaustion**
- Open file streams without closing → file descriptor limit exceeded
- Get connections from the pool, never return them → connection starvation
- System becomes unresponsive

**Problem 2: Memory leaks**
- Stream emits large objects → memory grows
- Without cleanup → garbage persists
- GC can't reclaim

**Problem 3: Data corruption**
- Write to a file without flushing → partial writes on crash
- Read from a connection while another thread writes → data race
- Results are unpredictable

**Problem 4: Silent failures**
- Resource cleanup fails → error lost
- Application proceeds as if successful
- Hidden bug becomes a hard-to-trace crash later

Solutions:

**Bracket pattern**:
- Acquire the resource
- Use the resource (even if an error occurs)
- Always release the resource
- Track errors separately

**Resource scopes**:
- Nested resource management
- Parent cleanup waits for children
- Hierarchical resource graphs
- Type-safe guarantees

**Connection pooling**:
- Reuse connections
- Track available/in-use
- Prevent exhaustion
- Support graceful shutdown (the stream-level bracket is sketched below)

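In stream code, the bracket above is usually spelled `Stream.acquireRelease`, which ties the resource to the stream's lifetime. A minimal sketch, assuming `Stream.acquireRelease(acquire, release)` as in current Effect releases; `FileHandle`, `open`, and `close` are illustrative stand-ins, not a real filesystem API:

```typescript
import { Stream, Effect } from "effect";

interface FileHandle {
  readonly path: string;
}

const open = (path: string): Effect.Effect<FileHandle> =>
  Effect.log(`[OPEN] ${path}`).pipe(Effect.as({ path }));

const close = (handle: FileHandle) => Effect.log(`[CLOSE] ${handle.path}`);

// The release action runs when the stream finishes, fails, or is interrupted
const lines = Stream.acquireRelease(open("/data/file.txt"), close).pipe(
  Stream.flatMap((handle) =>
    // Pretend these lines were read through the handle
    Stream.make(`line 1 of ${handle.path}`, `line 2 of ${handle.path}`)
  ),
  Stream.tap((line) => Effect.log(line))
);

Effect.runPromise(Stream.runDrain(lines));
```
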
---

---

### Stream Pattern 7: Error Handling in Streams

**Rule:** Use Stream error handlers to recover from failures, retry operations, and maintain stream integrity even when individual elements fail.

**Good Example:**

This example demonstrates stream error handling patterns.

+ ```typescript
1251
+ import { Effect, Stream, Ref } from "effect";
1252
+
1253
+ interface DataRecord {
1254
+ id: string;
1255
+ value: number;
1256
+ }
1257
+
1258
+ interface ProcessingResult {
1259
+ successful: DataRecord[];
1260
+ failed: Array<{ id: string; error: string }>;
1261
+ retried: number;
1262
+ }
1263
+
1264
+ const program = Effect.gen(function* () {
1265
+ console.log(`\n[STREAM ERROR HANDLING] Resilient stream processing\n`);
1266
+
1267
+ // Example 1: Continue on error (skip failed, process rest)
1268
+ console.log(`[1] Continue processing despite errors:\n`);
1269
+
1270
+ const processElement = (record: DataRecord): Effect.Effect<string> =>
1271
+ Effect.gen(function* () {
1272
+ if (record.value < 0) {
1273
+ yield* Effect.fail(new Error(`Invalid value: ${record.value}`));
1274
+ }
1275
+
1276
+ return `processed-${record.id}`;
1277
+ });
1278
+
1279
+ const records = [
1280
+ { id: "rec1", value: 10 },
1281
+ { id: "rec2", value: -5 }, // Will fail
1282
+ { id: "rec3", value: 20 },
1283
+ { id: "rec4", value: -1 }, // Will fail
1284
+ { id: "rec5", value: 30 },
1285
+ ];
1286
+
1287
+ const successfulProcessing = yield* Stream.fromIterable(records).pipe(
1288
+ Stream.mapEffect((record) =>
1289
+ processElement(record).pipe(
1290
+ Effect.map((result) => ({ success: true, result })),
1291
+ Effect.catchAll((error) =>
1292
+ Effect.gen(function* () {
1293
+ yield* Effect.log(`[ERROR] Record ${record.id} failed`);
1294
+
1295
+ return { success: false, error };
1296
+ })
1297
+ )
1298
+ )
1299
+ ),
1300
+ Stream.runCollect
1301
+ );
1302
+
1303
+ yield* Effect.log(
1304
+ `[RESULTS] ${successfulProcessing.filter((r) => r.success).length}/${records.length} succeeded\n`
1305
+ );
1306
+
1307
+ // Example 2: Recover with fallback value
1308
+ console.log(`[2] Providing fallback on error:\n`);
1309
+
1310
+ const getData = (id: string): Effect.Effect<number> =>
1311
+ id.includes("fail") ? Effect.fail(new Error("Data error")) : Effect.succeed(42);
1312
+
1313
+ const recovered = yield* Stream.fromIterable(["ok1", "fail1", "ok2"]).pipe(
1314
+ Stream.mapEffect((id) =>
1315
+ getData(id).pipe(
1316
+ Effect.catchAll(() =>
1317
+ Effect.gen(function* () {
1318
+ yield* Effect.log(`[FALLBACK] Using default for ${id}`);
1319
+
1320
+ return -1; // Fallback value
1321
+ })
1322
+ )
1323
+ )
1324
+ ),
1325
+ Stream.runCollect
1326
+ );
1327
+
1328
+ yield* Effect.log(`[VALUES] ${recovered.join(", ")}\n`);
1329
+
1330
+ // Example 3: Collect errors alongside successes
1331
+ console.log(`[3] Collecting errors and successes:\n`);
1332
+
1333
+ const results = yield* Ref.make<ProcessingResult>({
1334
+ successful: [],
1335
+ failed: [],
1336
+ retried: 0,
1337
+ });
1338
+
1339
+ yield* Stream.fromIterable(records).pipe(
1340
+ Stream.mapEffect((record) =>
1341
+ processElement(record).pipe(
1342
+ Effect.tap((result) =>
1343
+ Ref.modify(results, (r) => [
1344
+ undefined,
1345
+ {
1346
+ ...r,
1347
+ successful: [...r.successful, record],
1348
+ },
1349
+ ])
1350
+ ),
1351
+ Effect.catchAll((error) =>
1352
+ Ref.modify(results, (r) => [
1353
+ undefined,
1354
+ {
1355
+ ...r,
1356
+ failed: [
1357
+ ...r.failed,
1358
+ { id: record.id, error: error.message },
1359
+ ],
1360
+ },
1361
+ ])
1362
+ )
1363
+ )
1364
+ ),
1365
+ Stream.runDrain
1366
+ );
1367
+
1368
+ const finalResults = yield* Ref.get(results);
1369
+
1370
+ yield* Effect.log(
1371
+ `[AGGREGATE] ${finalResults.successful.length} succeeded, ${finalResults.failed.length} failed`
1372
+ );
1373
+
1374
+ for (const failure of finalResults.failed) {
1375
+ yield* Effect.log(` - ${failure.id}: ${failure.error}`);
1376
+ }
1377
+
1378
+ // Example 4: Retry on error with backoff
1379
+ console.log(`\n[4] Retry with exponential backoff:\n`);
1380
+
1381
+ let attemptCount = 0;
1382
+
1383
+ const unreliableOperation = (id: string): Effect.Effect<string> =>
1384
+ Effect.gen(function* () {
1385
+ attemptCount++;
1386
+
1387
+ if (attemptCount <= 2) {
1388
+ yield* Effect.log(`[ATTEMPT ${attemptCount}] Failing for ${id}`);
1389
+
1390
+ yield* Effect.fail(new Error("Temporary failure"));
1391
+ }
1392
+
1393
+ yield* Effect.log(`[SUCCESS] Succeeded on attempt ${attemptCount}`);
1394
+
1395
+ return `result-${id}`;
1396
+ });
1397
+
1398
+ const retried = unreliableOperation("test").pipe(
1399
+ Effect.retry(
1400
+ Schedule.exponential("10 millis").pipe(
1401
+ Schedule.upTo("100 millis"),
1402
+ Schedule.recurs(3)
1403
+ )
1404
+ ),
1405
+ Effect.catchAll((error) =>
1406
+ Effect.gen(function* () {
1407
+ yield* Effect.log(`[EXHAUSTED] All retries failed`);
1408
+
1409
+ return "fallback";
1410
+ })
1411
+ )
1412
+ );
1413
+
1414
+ yield* retried;
1415
+
1416
+ // Example 5: Error context in streams
1417
+ console.log(`\n[5] Propagating error context:\n`);
1418
+
1419
+ interface StreamContext {
1420
+ batchId: string;
1421
+ timestamp: Date;
1422
+ }
1423
+
1424
+ const processWithContext = (context: StreamContext) =>
1425
+ Stream.fromIterable([1, 2, -3, 4]).pipe(
1426
+ Stream.mapEffect((value) =>
1427
+ Effect.gen(function* () {
1428
+ if (value < 0) {
1429
+ yield* Effect.fail(
1430
+ new Error(
1431
+ `Negative value in batch ${context.batchId} at ${context.timestamp.toISOString()}`
1432
+ )
1433
+ );
1434
+ }
1435
+
1436
+ return value * 2;
1437
+ })
1438
+ ),
1439
+ Stream.catchAll((error) =>
1440
+ Effect.gen(function* () {
1441
+ yield* Effect.log(`[CONTEXT ERROR] ${error.message}`);
1442
+
1443
+ return Stream.empty;
1444
+ })
1445
+ )
1446
+ );
1447
+
1448
+ const context: StreamContext = {
1449
+ batchId: "batch-001",
1450
+ timestamp: new Date(),
1451
+ };
1452
+
1453
+ yield* processWithContext(context).pipe(Stream.runDrain);
+
+   // Example 6: Partial recovery (keep good data, log bad)
+   console.log(`\n[6] Partial recovery strategy:\n`);
+
+   const mixedQuality = [
+     { id: "1", data: "good" },
+     { id: "2", data: "bad" },
+     { id: "3", data: "good" },
+     { id: "4", data: "bad" },
+     { id: "5", data: "good" },
+   ];
+
+   const processQuality = (record: { id: string; data: string }) =>
+     record.data === "good"
+       ? Effect.succeed(`valid-${record.id}`)
+       : Effect.fail(new Error(`Invalid data for ${record.id}`));
+
+   const partialResults = yield* Stream.fromIterable(mixedQuality).pipe(
+     Stream.mapEffect((record) =>
+       processQuality(record).pipe(
+         Effect.catchAll((error) =>
+           Effect.gen(function* () {
+             yield* Effect.log(`[LOG] ${error.message}`);
+
+             return null; // Skip this record
+           })
+         )
+       )
+     ),
+     Stream.filter((result): result is string => result !== null),
+     Stream.runCollect
+   );
+
+   yield* Effect.log(
+     `[PARTIAL] Kept ${partialResults.length}/${mixedQuality.length} valid records\n`
+   );
+
+   // Example 7: Timeout handling in streams
+   console.log(`[7] Timeout handling per element:\n`);
+
+   const slowOperation = (id: string): Effect.Effect<string> =>
+     Effect.gen(function* () {
+       // Simulate slow operations
+       if (id === "slow") {
+         yield* Effect.sleep("200 millis");
+       } else {
+         yield* Effect.sleep("50 millis");
+       }
+
+       return `done-${id}`;
+     });
+
+   const withTimeout = yield* Stream.fromIterable(["fast1", "slow", "fast2"]).pipe(
+     Stream.mapEffect((id) =>
+       slowOperation(id).pipe(
+         Effect.timeout("100 millis"),
+         Effect.catchAll(() =>
+           Effect.gen(function* () {
+             yield* Effect.log(`[TIMEOUT] Operation ${id} timed out`);
+
+             return "timeout-fallback";
+           })
+         )
+       )
+     ),
+     Stream.runCollect
+   );
+
+   // runCollect returns a Chunk, which is iterable but has no .join
+   yield* Effect.log(`[RESULTS] ${Array.from(withTimeout).join(", ")}\n`);
1523
+
1524
+ // Example 8: Stream termination on critical error
1525
+ console.log(`[8] Terminating stream on critical error:\n`);
1526
+
1527
+ const isCritical = (error: Error): boolean =>
1528
+ error.message.includes("CRITICAL");
1529
+
1530
+ const terminateOnCritical = Stream.fromIterable([1, 2, 3]).pipe(
1531
+ Stream.mapEffect((value) =>
1532
+ value === 2
1533
+ ? Effect.fail(new Error("CRITICAL: System failure"))
1534
+ : Effect.succeed(value)
1535
+ ),
1536
+ Stream.catchAll((error) =>
1537
+ Effect.gen(function* () {
1538
+ if (isCritical(error)) {
1539
+ yield* Effect.log(`[CRITICAL] Terminating stream`);
1540
+
1541
+ return Stream.fail(error);
1542
+ }
1543
+
1544
+ yield* Effect.log(`[WARNING] Continuing despite error`);
1545
+
1546
+ return Stream.empty;
1547
+ })
1548
+ )
1549
+ );
1550
+
1551
+ yield* terminateOnCritical.pipe(
1552
+ Stream.runCollect,
1553
+ Effect.catchAll((error) =>
1554
+ Effect.gen(function* () {
1555
+ yield* Effect.log(`[STOPPED] Stream stopped: ${error.message}`);
1556
+
1557
+ return [];
1558
+ })
1559
+ )
1560
+ );
1561
+ });
1562
+
1563
+ Effect.runPromise(program);
1564
+ ```
+
+ ---
+
+ **Rationale:**
+
+ Stream error handling enables resilience:
+
+ - **Continue on error**: Skip the failed element and process the rest
+ - **Recover**: Provide a fallback value
+ - **Retry**: Attempt the failed element again
+ - **Aggregate**: Collect errors alongside successful values
+ - **Terminate gracefully**: Controlled shutdown
+ - **Propagate**: Let errors flow through to the consumer
+
+ Pattern: `Stream.catchAll()`, `Stream.retry()`, `Stream.orElse()`, `Stream.runCollect()`
+
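+ The examples above recover per element; a minimal sketch of the stream-level operators named in the pattern list, assuming a deliberately failing source:
+
+ ```typescript
+ import { Schedule, Stream } from "effect";
+
+ // Deliberately failing source, used only to exercise the operators
+ const flaky = Stream.fail(new Error("flaky source"));
+
+ const resilient = flaky.pipe(
+   // Re-run the whole stream up to 2 more times before giving up
+   Stream.retry(Schedule.recurs(2)),
+   // If it still fails, switch to a fallback stream
+   Stream.orElse(() => Stream.make(0))
+ );
+ ```
+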
+ ---
+
+ Errors in streams cause cascading failures:
+
+ **Problem 1: Stream death**
+ - Process 10,000 records
+ - Record #5000 has bad data
+ - Stream crashes
+ - The remaining 5,000 records are never processed
+ - Manual re-run needed
+
+ **Problem 2: Silent data loss**
+ - Stream encounters error
+ - Stops processing
+ - Caller doesn't notice
+ - Missing data goes undetected
+ - Reports show wrong numbers
+
+ **Problem 3: No recovery visibility**
+ - Error happens
+ - Is it retried? How many times?
+ - Did it recover?
+ - Without logging, you are left guessing
+
+ **Problem 4: Downstream effects**
+ - Stream error affects all subscribers
+ - Cascading failure
+ - System becomes unavailable
+ - All downstream consumers blocked
+
+ Solutions:
+
+ **Continue on error**:
+ - Skip failed element
+ - Process rest of stream
+ - Collect error for later
+ - Partial success acceptable
+
+ **Retry with backoff**:
+ - Transient error? Retry
+ - Exponential backoff
+ - Eventually give up
+ - Move to next element
+
+ **Error aggregation**:
+ - Collect all errors
+ - Collect all successes
+ - Report both
+ - Analytics/debugging
+
+ **Graceful termination**:
+ - Signal end of stream on error
+ - Allow cleanup
+ - Prevent resource leak
+ - Controlled shutdown (see the sketch below)
+
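+ A minimal sketch of the cleanup side, assuming the finalizer is an ordinary effect: `Stream.ensuring` runs it however the stream ends.
+
+ ```typescript
+ import { Effect, Stream } from "effect";
+
+ const guarded = Stream.fromIterable([1, 2, 3]).pipe(
+   Stream.mapEffect((n) =>
+     n === 2 ? Effect.fail(new Error("boom")) : Effect.succeed(n)
+   ),
+   // The finalizer runs on completion, failure, or interruption
+   Stream.ensuring(Effect.log("[CLEANUP] releasing resources"))
+ );
+ ```
+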
+ ---
+
+ ---
+
+ ### Stream Pattern 8: Advanced Stream Transformations
+
+ **Rule:** Use advanced stream operators to build sophisticated data pipelines that compose elegantly and maintain performance at scale.
+
+ **Good Example:**
+
+ This example demonstrates advanced stream transformations.
+
+ ```typescript
+ import { Chunk, Effect, Stream } from "effect";
+
+ const program = Effect.gen(function* () {
+   console.log(`\n[ADVANCED STREAM TRANSFORMATIONS] Complex data flows\n`);
+
+   // Example 1: Custom filter operator
+   console.log(`[1] Custom filter with effect-based logic:\n`);
+
+   // (Effect also ships Stream.filterEffect for this case; the custom
+   // operator below is shown as a composition exercise.)
+   const filterByEffect =
+     <A>(predicate: (a: A) => Effect.Effect<boolean>) =>
+     (stream: Stream.Stream<A>) =>
+       stream.pipe(
+         Stream.mapEffect((value) =>
+           predicate(value).pipe(Effect.map((keep) => (keep ? value : null)))
+         ),
+         Stream.filter((value): value is A => value !== null)
+       );
+
+   const isValid = (num: number): Effect.Effect<boolean> =>
+     Effect.gen(function* () {
+       // Simulate validation effect (e.g., API call)
+       return num > 0 && num < 100;
+     });
+
+   const numbers = [50, 150, 25, -10, 75];
+
+   const validNumbers = yield* Stream.fromIterable(numbers).pipe(
+     filterByEffect(isValid),
+     Stream.runCollect
+   );
+
+   yield* Effect.log(`[VALID] ${Array.from(validNumbers).join(", ")}\n`);
+
+   // Example 2: Enrichment transformation
+   console.log(`[2] Enriching records with additional data:\n`);
+
+   interface RawRecord {
+     id: string;
+     value: number;
+   }
+
+   interface EnrichedRecord {
+     id: string;
+     value: number;
+     validated: boolean;
+     processed: Date;
+     metadata: Record<string, unknown>;
+   }
+
+   const enrich = (record: RawRecord): Effect.Effect<EnrichedRecord> =>
+     Effect.gen(function* () {
+       // Simulate lookup/validation
+       const validated = record.value > 0;
+
+       return {
+         id: record.id,
+         value: record.value,
+         validated,
+         processed: new Date(),
+         metadata: { source: "stream" },
+       };
+     });
+
+   const rawData = [
+     { id: "r1", value: 10 },
+     { id: "r2", value: -5 },
+     { id: "r3", value: 20 },
+   ];
+
+   const enriched = yield* Stream.fromIterable(rawData).pipe(
+     Stream.mapEffect(enrich),
+     Stream.runCollect
+   );
+
+   yield* Effect.log(`[ENRICHED] ${enriched.length} records enriched\n`);
+
+   // Example 3: Demultiplexing (split one stream into multiple)
+   console.log(`[3] Demultiplexing by category:\n`);
+
+   interface Event {
+     id: string;
+     type: "click" | "view" | "purchase";
+     data: unknown;
+   }
+
+   const events: Event[] = [
+     { id: "e1", type: "click", data: { x: 100, y: 200 } },
+     { id: "e2", type: "view", data: { url: "/" } },
+     { id: "e3", type: "purchase", data: { amount: 99.99 } },
+     { id: "e4", type: "click", data: { x: 50, y: 100 } },
+   ];
+
+   // One filtered pass per category; for a two-way split in a single
+   // pass, Stream.partition is an alternative
+   const clicks = yield* Stream.fromIterable(events).pipe(
+     Stream.filter((e) => e.type === "click"),
+     Stream.runCollect
+   );
+
+   const views = yield* Stream.fromIterable(events).pipe(
+     Stream.filter((e) => e.type === "view"),
+     Stream.runCollect
+   );
+
+   const purchases = yield* Stream.fromIterable(events).pipe(
+     Stream.filter((e) => e.type === "purchase"),
+     Stream.runCollect
+   );
+
+   yield* Effect.log(
+     `[DEMUX] Clicks: ${clicks.length}, Views: ${views.length}, Purchases: ${purchases.length}\n`
+   );
+
+   // Example 4: Chunked processing (batch transformation)
+   console.log(`[4] Chunked processing (batches of N):\n`);
+
+   const processChunk = (chunk: ReadonlyArray<{ id: string; value: number }>) =>
+     Effect.gen(function* () {
+       const sum = chunk.reduce((s, r) => s + r.value, 0);
+       const avg = sum / chunk.length;
+
+       yield* Effect.log(`[CHUNK] ${chunk.length} items, avg: ${avg.toFixed(2)}`);
+
+       return { size: chunk.length, sum, avg };
+     });
+
+   const data = Array.from({ length: 10 }, (_, i) => ({
+     id: `d${i}`,
+     value: i + 1,
+   }));
+
+   // Stream.grouped emits batches of up to N elements, so no manual
+   // slicing loop is needed
+   const chunkResults = yield* Stream.fromIterable(data).pipe(
+     Stream.grouped(3),
+     Stream.mapEffect((chunk) => processChunk(Chunk.toReadonlyArray(chunk))),
+     Stream.runCollect
+   );
+
+   yield* Effect.log(`[CHUNKS] Processed ${chunkResults.length} batches\n`);
+
+   // Example 5: Multi-stage transformation pipeline
+   console.log(`[5] Multi-stage pipeline (parse → validate → transform):\n`);
+
+   const rawStrings = ["10", "twenty", "30", "-5", "50"];
+
+   // Stage 1: Parse (parseInt returns NaN rather than throwing, so the
+   // failure case is detected with Number.isNaN)
+   const parsed = yield* Stream.fromIterable(rawStrings).pipe(
+     Stream.mapEffect((s) =>
+       Effect.gen(function* () {
+         const n = parseInt(s, 10);
+
+         if (Number.isNaN(n)) {
+           return yield* Effect.fail(new Error(`Failed to parse: ${s}`));
+         }
+
+         return n;
+       }).pipe(
+         Effect.catchAll((error) =>
+           Effect.gen(function* () {
+             yield* Effect.log(`[PARSE ERROR] ${error.message}`);
+
+             return null;
+           })
+         )
+       )
+     ),
+     Stream.filter((n): n is number => n !== null),
+     Stream.runCollect
+   );
+
+   const parsedArray = Chunk.toReadonlyArray(parsed);
+
+   yield* Effect.log(`[STAGE 1] Parsed: ${parsedArray.join(", ")}`);
+
+   // Stage 2: Validate
+   const validated = parsedArray.filter((n) => n > 0);
+
+   yield* Effect.log(`[STAGE 2] Validated: ${validated.join(", ")}`);
+
+   // Stage 3: Transform
+   const transformed = validated.map((n) => n * 2);
+
+   yield* Effect.log(`[STAGE 3] Transformed: ${transformed.join(", ")}\n`);
+
+   // Example 6: Composition of custom operators
+   console.log(`[6] Composable transformation pipeline:\n`);
+
+   // Define custom operators
+   const withLogging =
+     <A>(label: string) =>
+     (stream: Stream.Stream<A>) =>
+       stream.pipe(
+         Stream.tap((value) =>
+           Effect.log(`[${label}] Processing: ${JSON.stringify(value)}`)
+         )
+       );
+
+   const filterPositive = (stream: Stream.Stream<number>) =>
+     stream.pipe(
+       Stream.filter((n) => n > 0),
+       Stream.tap(() => Effect.log(`[FILTER] Kept positive`))
+     );
+
+   const scaleUp =
+     (factor: number) =>
+     (stream: Stream.Stream<number>) =>
+       stream.pipe(
+         Stream.map((n) => n * factor),
+         Stream.tap((n) => Effect.log(`[SCALE] Scaled to ${n}`))
+       );
+
+   const testData = [10, -5, 20, -3, 30];
+
+   const pipeline = yield* Stream.fromIterable(testData).pipe(
+     // The type argument is explicit because nothing in `withLogging`'s
+     // own arguments pins down A
+     withLogging<number>("INPUT"),
+     filterPositive,
+     scaleUp(10),
+     Stream.runCollect
+   );
+
+   yield* Effect.log(`[RESULT] Final: ${Array.from(pipeline).join(", ")}\n`);
+
+   // Example 7: Stateful transformation (running total)
+   console.log(`[7] Stateful transformation (running total):\n`);
+
+   // Stream.scan emits the seed first, so the output is 0, 1, 3, 6, 10, 15
+   const runningTotal = yield* Stream.fromIterable([1, 2, 3, 4, 5]).pipe(
+     Stream.scan(0, (acc, value) => acc + value),
+     Stream.runCollect
+   );
+
+   yield* Effect.log(`[TOTALS] ${Array.from(runningTotal).join(", ")}\n`);
+
+   // Example 8: Conditional transformation
+   console.log(`[8] Conditional transformation (different paths):\n`);
+
+   interface Item {
+     id: string;
+     priority: "high" | "normal" | "low";
+   }
+
+   const transformByPriority = (
+     item: Item
+   ): Effect.Effect<{ id: string; processed: string }> =>
+     Effect.gen(function* () {
+       switch (item.priority) {
+         case "high":
+           yield* Effect.log(`[HIGH] Priority processing for ${item.id}`);
+
+           return { id: item.id, processed: "urgent" };
+
+         case "normal":
+           yield* Effect.log(`[NORMAL] Standard processing for ${item.id}`);
+
+           return { id: item.id, processed: "standard" };
+
+         case "low":
+           yield* Effect.log(`[LOW] Deferred processing for ${item.id}`);
+
+           return { id: item.id, processed: "deferred" };
+       }
+     });
+
+   const items: Item[] = [
+     { id: "i1", priority: "normal" },
+     { id: "i2", priority: "high" },
+     { id: "i3", priority: "low" },
+   ];
+
+   const processed = yield* Stream.fromIterable(items).pipe(
+     Stream.mapEffect(transformByPriority),
+     Stream.runCollect
+   );
+
+   yield* Effect.log(`[CONDITIONAL] Processed ${processed.length} items\n`);
+
+   // Example 9: Performance-optimized transformation
+   console.log(`[9] Optimized for performance:\n`);
+
+   const largeDataset = Array.from({ length: 1000 }, (_, i) => i);
+
+   const startTime = Date.now();
+
+   // Because streams are pull-based, Stream.take(100) stops upstream
+   // work once 100 elements have been collected
+   const result = yield* Stream.fromIterable(largeDataset).pipe(
+     Stream.filter((n) => n % 2 === 0), // Keep even
+     Stream.take(100), // Limit to first 100
+     Stream.map((n) => n * 2), // Transform
+     Stream.runCollect
+   );
+
+   const elapsed = Date.now() - startTime;
+
+   yield* Effect.log(
+     `[PERF] Kept ${result.length} of ${largeDataset.length} items in ${elapsed}ms`
+   );
+ });
+
+ Effect.runPromise(program);
+ ```
+
+ ---
+
+ **Rationale:**
+
+ Advanced transformations enable complex data flows:
+
+ - **Custom operators**: Build reusable transformations
+ - **Effect-based**: Transformations that perform side effects
+ - **Lazy evaluation**: Compute only what's needed (see the sketch below)
+ - **Single pass**: Composed stages run per element, with no intermediate collections
+ - **Staging**: Multiple transformation layers
+ - **Composition**: Combine operators cleanly
+
+ Pattern: `Stream.mapEffect()`, `Stream.map()`, pipe composition
+
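+ A small sketch of the laziness claim, assuming nothing beyond the operators above: an unbounded source composed with `Stream.take` does only the work that is consumed.
+
+ ```typescript
+ import { Stream } from "effect";
+
+ // Unbounded stream of naturals; nothing runs until a sink pulls
+ const naturals = Stream.iterate(0, (n) => n + 1);
+
+ const firstFive = naturals.pipe(
+   Stream.filter((n) => n % 2 === 0),
+   Stream.map((n) => n * 2),
+   Stream.take(5) // upstream stops being pulled after 5 elements
+ );
+
+ // Stream.runCollect(firstFive) yields 0, 4, 8, 12, 16
+ ```
+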
+ ---
+
+ Simple transformations don't scale:
+
+ **Problem 1: Performance degradation**
+ - Each array stage creates an intermediate collection
+ - 10 transformations = 10 intermediate allocations
+ - Processing 1M items means 10 million-element intermediates
+ - GC pressure, memory exhaustion
+
+ **Problem 2: Complex logic scattered**
+ - Validation here, enrichment there, filtering elsewhere
+ - Hard to maintain
+ - Changes break other parts
+ - No clear data flow
+
+ **Problem 3: Effect handling**
+ - Transformations need side effects
+ - Network calls, database queries
+ - Naive approach: load all, transform sequentially
+ - Slow, inefficient
+
+ **Problem 4: Reusability**
+ - Custom transformation used once
+ - Next time, rewritten from scratch
+ - Code duplication
+ - Bugs replicated
+
+ Solutions:
+
+ **Custom operators**:
+ - Encapsulate transformation logic
+ - Reusable across projects
+ - Testable in isolation
+ - Composable
+
+ **Lazy evaluation**:
+ - Compute as elements flow
+ - No intermediate collections
+ - Constant memory
+ - Only compute what's used
+
+ **Fusion**:
+ - Combine multiple maps/filters
+ - Single pass through data (see the sketch after this list)
+ - No intermediate collections
+ - The library composes stages per element
+
+ **Effect composition**:
+ - Chain effects naturally
+ - Error propagation automatic
+ - Resource cleanup guaranteed
+ - Readable code
+
+ ---
+
+ ---
+