@dojocho/effect-ts 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (149)
  1. package/DOJO.md +22 -0
  2. package/dojo.json +50 -0
  3. package/katas/001-hello-effect/SENSEI.md +72 -0
  4. package/katas/001-hello-effect/solution.test.ts +35 -0
  5. package/katas/001-hello-effect/solution.ts +16 -0
  6. package/katas/002-transform-with-map/SENSEI.md +72 -0
  7. package/katas/002-transform-with-map/solution.test.ts +33 -0
  8. package/katas/002-transform-with-map/solution.ts +16 -0
  9. package/katas/003-generator-pipelines/SENSEI.md +72 -0
  10. package/katas/003-generator-pipelines/solution.test.ts +40 -0
  11. package/katas/003-generator-pipelines/solution.ts +29 -0
  12. package/katas/004-flatmap-and-chaining/SENSEI.md +80 -0
  13. package/katas/004-flatmap-and-chaining/solution.test.ts +34 -0
  14. package/katas/004-flatmap-and-chaining/solution.ts +18 -0
  15. package/katas/005-pipe-composition/SENSEI.md +81 -0
  16. package/katas/005-pipe-composition/solution.test.ts +41 -0
  17. package/katas/005-pipe-composition/solution.ts +19 -0
  18. package/katas/006-handle-errors/SENSEI.md +86 -0
  19. package/katas/006-handle-errors/solution.test.ts +53 -0
  20. package/katas/006-handle-errors/solution.ts +30 -0
  21. package/katas/007-tagged-errors/SENSEI.md +79 -0
  22. package/katas/007-tagged-errors/solution.test.ts +82 -0
  23. package/katas/007-tagged-errors/solution.ts +37 -0
  24. package/katas/008-error-patterns/SENSEI.md +89 -0
  25. package/katas/008-error-patterns/solution.test.ts +41 -0
  26. package/katas/008-error-patterns/solution.ts +38 -0
  27. package/katas/009-option-type/SENSEI.md +96 -0
  28. package/katas/009-option-type/solution.test.ts +49 -0
  29. package/katas/009-option-type/solution.ts +26 -0
  30. package/katas/010-either-and-exit/SENSEI.md +86 -0
  31. package/katas/010-either-and-exit/solution.test.ts +33 -0
  32. package/katas/010-either-and-exit/solution.ts +17 -0
  33. package/katas/011-services-and-context/SENSEI.md +82 -0
  34. package/katas/011-services-and-context/solution.test.ts +23 -0
  35. package/katas/011-services-and-context/solution.ts +17 -0
  36. package/katas/012-layers/SENSEI.md +73 -0
  37. package/katas/012-layers/solution.test.ts +23 -0
  38. package/katas/012-layers/solution.ts +26 -0
  39. package/katas/013-testing-effects/SENSEI.md +88 -0
  40. package/katas/013-testing-effects/solution.test.ts +41 -0
  41. package/katas/013-testing-effects/solution.ts +20 -0
  42. package/katas/014-schema-basics/SENSEI.md +81 -0
  43. package/katas/014-schema-basics/solution.test.ts +35 -0
  44. package/katas/014-schema-basics/solution.ts +25 -0
  45. package/katas/015-domain-modeling/SENSEI.md +85 -0
  46. package/katas/015-domain-modeling/solution.test.ts +46 -0
  47. package/katas/015-domain-modeling/solution.ts +42 -0
  48. package/katas/016-retry-and-schedule/SENSEI.md +72 -0
  49. package/katas/016-retry-and-schedule/solution.test.ts +26 -0
  50. package/katas/016-retry-and-schedule/solution.ts +23 -0
  51. package/katas/017-parallel-effects/SENSEI.md +70 -0
  52. package/katas/017-parallel-effects/solution.test.ts +33 -0
  53. package/katas/017-parallel-effects/solution.ts +17 -0
  54. package/katas/018-race-and-timeout/SENSEI.md +75 -0
  55. package/katas/018-race-and-timeout/solution.test.ts +30 -0
  56. package/katas/018-race-and-timeout/solution.ts +27 -0
  57. package/katas/019-ref-and-state/SENSEI.md +72 -0
  58. package/katas/019-ref-and-state/solution.test.ts +29 -0
  59. package/katas/019-ref-and-state/solution.ts +16 -0
  60. package/katas/020-fibers/SENSEI.md +80 -0
  61. package/katas/020-fibers/solution.test.ts +23 -0
  62. package/katas/020-fibers/solution.ts +23 -0
  63. package/katas/021-acquire-release/SENSEI.md +57 -0
  64. package/katas/021-acquire-release/solution.test.ts +23 -0
  65. package/katas/021-acquire-release/solution.ts +22 -0
  66. package/katas/022-scoped-layers/SENSEI.md +52 -0
  67. package/katas/022-scoped-layers/solution.test.ts +35 -0
  68. package/katas/022-scoped-layers/solution.ts +19 -0
  69. package/katas/023-resource-patterns/SENSEI.md +52 -0
  70. package/katas/023-resource-patterns/solution.test.ts +20 -0
  71. package/katas/023-resource-patterns/solution.ts +13 -0
  72. package/katas/024-streams-basics/SENSEI.md +61 -0
  73. package/katas/024-streams-basics/solution.test.ts +30 -0
  74. package/katas/024-streams-basics/solution.ts +16 -0
  75. package/katas/025-stream-operations/SENSEI.md +59 -0
  76. package/katas/025-stream-operations/solution.test.ts +26 -0
  77. package/katas/025-stream-operations/solution.ts +17 -0
  78. package/katas/026-combining-streams/SENSEI.md +54 -0
  79. package/katas/026-combining-streams/solution.test.ts +20 -0
  80. package/katas/026-combining-streams/solution.ts +16 -0
  81. package/katas/027-data-pipelines/SENSEI.md +58 -0
  82. package/katas/027-data-pipelines/solution.test.ts +22 -0
  83. package/katas/027-data-pipelines/solution.ts +16 -0
  84. package/katas/028-logging-and-spans/SENSEI.md +58 -0
  85. package/katas/028-logging-and-spans/solution.test.ts +50 -0
  86. package/katas/028-logging-and-spans/solution.ts +20 -0
  87. package/katas/029-http-client/SENSEI.md +59 -0
  88. package/katas/029-http-client/solution.test.ts +49 -0
  89. package/katas/029-http-client/solution.ts +24 -0
  90. package/katas/030-capstone/SENSEI.md +63 -0
  91. package/katas/030-capstone/solution.test.ts +67 -0
  92. package/katas/030-capstone/solution.ts +55 -0
  93. package/katas/031-config-and-environment/SENSEI.md +77 -0
  94. package/katas/031-config-and-environment/solution.test.ts +38 -0
  95. package/katas/031-config-and-environment/solution.ts +11 -0
  96. package/katas/032-cause-and-defects/SENSEI.md +90 -0
  97. package/katas/032-cause-and-defects/solution.test.ts +50 -0
  98. package/katas/032-cause-and-defects/solution.ts +23 -0
  99. package/katas/033-pattern-matching/SENSEI.md +86 -0
  100. package/katas/033-pattern-matching/solution.test.ts +36 -0
  101. package/katas/033-pattern-matching/solution.ts +28 -0
  102. package/katas/034-deferred-and-coordination/SENSEI.md +85 -0
  103. package/katas/034-deferred-and-coordination/solution.test.ts +25 -0
  104. package/katas/034-deferred-and-coordination/solution.ts +24 -0
  105. package/katas/035-queue-and-backpressure/SENSEI.md +100 -0
  106. package/katas/035-queue-and-backpressure/solution.test.ts +25 -0
  107. package/katas/035-queue-and-backpressure/solution.ts +21 -0
  108. package/katas/036-schema-advanced/SENSEI.md +81 -0
  109. package/katas/036-schema-advanced/solution.test.ts +55 -0
  110. package/katas/036-schema-advanced/solution.ts +19 -0
  111. package/katas/037-cache-and-memoization/SENSEI.md +73 -0
  112. package/katas/037-cache-and-memoization/solution.test.ts +47 -0
  113. package/katas/037-cache-and-memoization/solution.ts +24 -0
  114. package/katas/038-metrics/SENSEI.md +91 -0
  115. package/katas/038-metrics/solution.test.ts +39 -0
  116. package/katas/038-metrics/solution.ts +23 -0
  117. package/katas/039-managed-runtime/SENSEI.md +75 -0
  118. package/katas/039-managed-runtime/solution.test.ts +29 -0
  119. package/katas/039-managed-runtime/solution.ts +19 -0
  120. package/katas/040-request-batching/SENSEI.md +87 -0
  121. package/katas/040-request-batching/solution.test.ts +56 -0
  122. package/katas/040-request-batching/solution.ts +32 -0
  123. package/package.json +22 -0
  124. package/skills/effect-patterns-building-apis/SKILL.md +2393 -0
  125. package/skills/effect-patterns-building-data-pipelines/SKILL.md +1876 -0
  126. package/skills/effect-patterns-concurrency/SKILL.md +2999 -0
  127. package/skills/effect-patterns-concurrency-getting-started/SKILL.md +351 -0
  128. package/skills/effect-patterns-core-concepts/SKILL.md +3199 -0
  129. package/skills/effect-patterns-domain-modeling/SKILL.md +1385 -0
  130. package/skills/effect-patterns-error-handling/SKILL.md +1212 -0
  131. package/skills/effect-patterns-error-handling-resilience/SKILL.md +179 -0
  132. package/skills/effect-patterns-error-management/SKILL.md +1668 -0
  133. package/skills/effect-patterns-getting-started/SKILL.md +237 -0
  134. package/skills/effect-patterns-making-http-requests/SKILL.md +1756 -0
  135. package/skills/effect-patterns-observability/SKILL.md +1586 -0
  136. package/skills/effect-patterns-platform/SKILL.md +1195 -0
  137. package/skills/effect-patterns-platform-getting-started/SKILL.md +179 -0
  138. package/skills/effect-patterns-project-setup--execution/SKILL.md +233 -0
  139. package/skills/effect-patterns-resource-management/SKILL.md +827 -0
  140. package/skills/effect-patterns-scheduling/SKILL.md +451 -0
  141. package/skills/effect-patterns-scheduling-periodic-tasks/SKILL.md +763 -0
  142. package/skills/effect-patterns-streams/SKILL.md +2052 -0
  143. package/skills/effect-patterns-streams-getting-started/SKILL.md +421 -0
  144. package/skills/effect-patterns-streams-sinks/SKILL.md +1181 -0
  145. package/skills/effect-patterns-testing/SKILL.md +1632 -0
  146. package/skills/effect-patterns-tooling-and-debugging/SKILL.md +1125 -0
  147. package/skills/effect-patterns-value-handling/SKILL.md +676 -0
  148. package/tsconfig.json +20 -0
  149. package/vitest.config.ts +3 -0
package/skills/effect-patterns-building-data-pipelines/SKILL.md (new file, +1876 lines):
---
name: effect-patterns-building-data-pipelines
description: Effect-TS patterns for Building Data Pipelines. Use when working with building data pipelines in Effect-TS applications.
---

# Effect-TS Patterns: Building Data Pipelines

This skill provides 14 curated Effect-TS patterns for building data pipelines.

Use this skill when working on tasks related to:

- building data pipelines
- Best practices in Effect-TS applications
- Real-world patterns and solutions

---

## 🟢 Beginner Patterns

### Create a Stream from a List

**Rule:** Use Stream.fromIterable to begin a pipeline from an in-memory collection.

**Good Example:**

This example takes a simple array of numbers, creates a stream from it, performs a transformation on each number, and then runs the stream to collect the results.

```typescript
import { Effect, Stream, Chunk } from "effect";

const numbers = [1, 2, 3, 4, 5];

// Create a stream from the array of numbers.
const program = Stream.fromIterable(numbers).pipe(
  // Perform a simple, synchronous transformation on each item.
  Stream.map((n) => `Item: ${n}`),
  // Run the stream and collect all the transformed items into a Chunk.
  Stream.runCollect
);

const programWithLogging = Effect.gen(function* () {
  const processedItems = yield* program;
  yield* Effect.log(
    `Processed items: ${JSON.stringify(Chunk.toArray(processedItems))}`
  );
  return processedItems;
});

Effect.runPromise(programWithLogging);
/*
Output:
Processed items: ["Item: 1","Item: 2","Item: 3","Item: 4","Item: 5"]
*/
```

**Anti-Pattern:**

The common alternative is to use standard array methods like `.map()` or a `for...of` loop. While perfectly fine for simple, synchronous tasks, this approach is an anti-pattern when building a _pipeline_.

```typescript
const numbers = [1, 2, 3, 4, 5];

// Using Array.prototype.map
const processedItems = numbers.map((n) => `Item: ${n}`);

console.log(processedItems);
```

This is an anti-pattern in the context of building a larger pipeline because:

1. **It's Not Composable with Effects**: The result is just a new array. If the next step in your pipeline was an asynchronous database call for each item, you couldn't simply `.pipe()` the result into it. You would have to leave the synchronous world of `.map()` and start a new `Effect.forEach`, breaking the unified pipeline structure. (A sketch follows this list.)
2. **It's Eager**: The `.map()` operation processes the entire array at once. `Stream` is lazy; it only processes items as they are requested by downstream consumers, which is far more efficient for large collections or complex transformations.
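
To make point 1 concrete, here is a minimal sketch (using a hypothetical `saveToDb` effect) of how the stream version pipes straight into an asynchronous step, which the bare `.map()` result cannot do:

```typescript
import { Effect, Stream } from "effect";

// Hypothetical effectful step, e.g. a database write
const saveToDb = (item: string): Effect.Effect<void> =>
  Effect.log(`Saved: ${item}`);

// The same transformation, now composed with an effectful step
const pipeline = Stream.fromIterable([1, 2, 3, 4, 5]).pipe(
  Stream.map((n) => `Item: ${n}`),
  Stream.mapEffect(saveToDb, { concurrency: 2 }),
  Stream.runDrain
);

Effect.runPromise(pipeline);
```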

**Rationale:**

To start a data pipeline from an existing in-memory collection like an array, use `Stream.fromIterable`.

---

Every data pipeline needs a source. The simplest and most common source is a pre-existing list of items in memory. `Stream.fromIterable` is the bridge from standard JavaScript data structures to the powerful, composable world of Effect's `Stream`.

This pattern is fundamental for several reasons:

1. **Entry Point**: It's the "Hello, World!" of data pipelines, providing the easiest way to start experimenting with stream transformations.
2. **Testing**: In tests, you frequently need to simulate a data source (like a database query or API call). Creating a stream from a mock array of data is the standard way to do this, allowing you to test your pipeline's logic in isolation.
3. **Composability**: It transforms a static, eager data structure (an array) into a lazy, pull-based stream. This allows you to pipe it into the rest of the Effect ecosystem, enabling asynchronous operations, concurrency, and resource management in subsequent steps.

---

---

### Run a Pipeline for its Side Effects

**Rule:** Use Stream.runDrain to execute a stream for its side effects when you don't need the final values.

**Good Example:**

This example creates a stream of tasks. For each task, it performs a side effect (logging it as "complete"). `Stream.runDrain` executes the pipeline, ensuring all logs are written, but without collecting the `void` results of each logging operation.

```typescript
import { Effect, Stream } from "effect";

const tasks = ["task 1", "task 2", "task 3"];

// A function that performs a side effect for a task
const completeTask = (task: string): Effect.Effect<void, never> =>
  Effect.log(`Completing ${task}`);

const program = Stream.fromIterable(tasks).pipe(
  // For each task, run the side-effectful operation
  Stream.mapEffect(completeTask, { concurrency: 1 }),
  // Run the stream for its effects, discarding the `void` results
  Stream.runDrain
);

const programWithLogging = Effect.gen(function* () {
  yield* program;
  yield* Effect.log("\nAll tasks have been processed.");
});

Effect.runPromise(programWithLogging);
/*
Output:
... level=INFO msg="Completing task 1"
... level=INFO msg="Completing task 2"
... level=INFO msg="Completing task 3"

All tasks have been processed.
*/
```

**Anti-Pattern:**

The anti-pattern is using `Stream.runCollect` when you only care about the side effects. This needlessly consumes memory and can lead to crashes.

```typescript
import { Effect, Stream } from "effect";
// ... same tasks and completeTask function ...

const program = Stream.fromIterable(tasks).pipe(
  Stream.mapEffect(completeTask, { concurrency: 1 }),
  // Anti-pattern: Collecting results that we are just going to ignore
  Stream.runCollect
);

Effect.runPromise(program).then((results) => {
  // The `results` variable here is a Chunk of `[void, void, void]`.
  // It served no purpose but consumed memory.
  console.log(
    `\nAll tasks processed. Unnecessarily collected ${results.length} empty results.`
  );
});
```

While this works for a small array of three items, it's a dangerous habit. If the `tasks` array contained millions of items, this code would create a `Chunk` with millions of `void` values, consuming a significant amount of memory for no reason and potentially crashing the application. `Stream.runDrain` avoids this problem entirely.

**Rationale:**

To run a stream purely for its side effects without accumulating the results in memory, use the `Stream.runDrain` sink.

---

Not all pipelines are designed to produce a final list of values. Often, the goal is to perform an action for each item—write it to a database, send it to a message queue, or log it to a file. In these "fire and forget" scenarios, collecting the results is not just unnecessary; it's a performance anti-pattern.

`Stream.runDrain` is the perfect tool for this job:

1. **Memory Efficiency**: This is its primary advantage. `runDrain` processes each item and then immediately discards it, resulting in constant, minimal memory usage. This makes it the only safe choice for processing extremely large or infinite streams.
2. **Clarity of Intent**: Using `runDrain` clearly communicates that you are interested in the successful execution of the stream's effects, not in its output values. The final `Effect` it produces resolves to `void`, reinforcing that no value is returned.
3. **Performance**: By avoiding the overhead of allocating and managing a growing list in memory, `runDrain` can be faster for pipelines with a very large number of small items.

---

---

### Collect All Results into a List

**Rule:** Use Stream.runCollect to execute a stream and collect all its emitted values into a Chunk.

**Good Example:**

This example creates a stream of numbers, filters for only the even ones, transforms them into strings, and then uses `runCollect` to gather the final results into a `Chunk`.

```typescript
import { Effect, Stream, Chunk } from "effect";

const program = Stream.range(1, 10).pipe(
  // Find all the even numbers
  Stream.filter((n) => n % 2 === 0),
  // Transform them into strings
  Stream.map((n) => `Even number: ${n}`),
  // Run the stream and collect the results
  Stream.runCollect
);

const programWithLogging = Effect.gen(function* () {
  const results = yield* program;
  yield* Effect.log(
    `Collected results: ${JSON.stringify(Chunk.toArray(results))}`
  );
  return results;
});

Effect.runPromise(programWithLogging);
/*
Output:
Collected results: ["Even number: 2","Even number: 4","Even number: 6","Even number: 8","Even number: 10"]
*/
```

**Anti-Pattern:**

The anti-pattern is using `Stream.runCollect` on a stream that produces an unbounded or extremely large number of items. This will inevitably lead to an out-of-memory error.

```typescript
import { Effect, Stream } from "effect";

// An infinite stream of numbers
const infiniteStream = Stream.range(1, Infinity);

const program = infiniteStream.pipe(
  // This will run forever, attempting to buffer an infinite number of items.
  Stream.runCollect
);

// This program will never finish and will eventually crash the process
// by consuming all available memory.
// Effect.runPromise(program);
console.log(
  "This code is commented out because it would cause an out-of-memory crash."
);
```

This is a critical mistake because `runCollect` must hold every single item emitted by the stream in memory simultaneously. For pipelines that process huge files, infinite data sources, or are designed to run forever, `runCollect` is the wrong tool. In those cases, you should use a sink like `Stream.runDrain`, which processes items without collecting them.

**Rationale:**

To execute a stream and collect all of its emitted values into a single, in-memory list, use the `Stream.runCollect` sink.

---

A "sink" is a terminal operator that consumes a stream and produces a final `Effect`. `Stream.runCollect` is the most fundamental sink. It provides the bridge from the lazy, pull-based world of `Stream` back to the familiar world of a single `Effect` that resolves with a standard data structure.

Using `Stream.runCollect` is essential when:

1. **You Need the Final Result**: The goal of your pipeline is to produce a complete list of transformed items that you need to use in a subsequent step (e.g., to return as a single JSON array from an API).
2. **Simplicity is Key**: It's the most straightforward way to "run" a stream and see its output. It declaratively states your intent: "execute this entire pipeline and give me all the results."
3. **The Dataset is Bounded**: It's designed for streams where the total number of items is known to be finite and small enough to fit comfortably in memory.

The result of `Stream.runCollect` is an `Effect` that, when executed, yields a `Chunk` containing all the items emitted by the stream.

---

---

## 🟡 Intermediate Patterns

### Turn a Paginated API into a Single Stream

**Rule:** Use Stream.paginateEffect to model a paginated data source as a single, continuous stream.

**Good Example:**

This example simulates fetching users from a paginated API. The `fetchUsersPage` function gets one page of data and returns the next page number. `Stream.paginateEffect` uses this function to create a single stream of all users across all pages.

```typescript
import { Effect, Stream, Chunk, Option } from "effect";

// --- Mock Paginated API ---
interface User {
  id: number;
  name: string;
}

// Define FetchError as a class with a literal type tag
class FetchError {
  readonly _tag = "FetchError" as const;
  constructor(readonly message: string) {}
}

// Helper to create FetchError instances
const fetchError = (message: string): FetchError => new FetchError(message);

const allUsers: User[] = Array.from({ length: 25 }, (_, i) => ({
  id: i + 1,
  name: `User ${i + 1}`,
}));

// This function simulates fetching a page of users from an API.
const fetchUsersPage = (
  page: number
): Effect.Effect<[Chunk.Chunk<User>, Option.Option<number>], FetchError> =>
  Effect.gen(function* () {
    const pageSize = 10;
    const offset = (page - 1) * pageSize;

    // Simulate potential API errors
    if (page < 1) {
      return yield* Effect.fail(fetchError("Invalid page number"));
    }

    const users = Chunk.fromIterable(allUsers.slice(offset, offset + pageSize));

    const nextPage =
      Chunk.isNonEmpty(users) && allUsers.length > offset + pageSize
        ? Option.some(page + 1)
        : Option.none();

    yield* Effect.log(`Fetched page ${page}`);
    return [users, nextPage] as [Chunk.Chunk<User>, Option.Option<number>];
  });

// --- The Pattern ---
// Use paginateEffect, providing an initial state (page 1) and the fetch function.
const userStream = Stream.paginateEffect(1, fetchUsersPage);

const program = userStream.pipe(
  Stream.runCollect,
  Effect.map((users) => users.length),
  Effect.tap((totalUsers) => Effect.log(`Total users fetched: ${totalUsers}`)),
  Effect.catchTag("FetchError", (error) =>
    Effect.succeed(`Error fetching users: ${error.message}`)
  )
);

// Run the program
const programWithLogging = Effect.gen(function* () {
  const result = yield* program;
  yield* Effect.log(`Program result: ${result}`);
  return result;
});

Effect.runPromise(programWithLogging);

/*
Output:
... level=INFO msg="Fetched page 1"
... level=INFO msg="Fetched page 2"
... level=INFO msg="Fetched page 3"
... level=INFO msg="Total users fetched: 25"
... level=INFO msg="Program result: 25"
*/
```

**Anti-Pattern:**

The anti-pattern is to write manual, imperative logic to handle the pagination loop. This code is stateful, harder to read, and not composable.

```typescript
import { Effect, Chunk, Option } from "effect";
// ... same mock API setup ...

const fetchAllUsersManually = (): Effect.Effect<Chunk.Chunk<User>, FetchError> =>
  Effect.gen(function* () {
    // Manual state management for results and current page
    let allFetchedUsers: User[] = [];
    let currentPage: Option.Option<number> = Option.some(1);

    // Manual loop to fetch pages
    while (Option.isSome(currentPage)) {
      const [users, nextPage] = yield* fetchUsersPage(currentPage.value);
      allFetchedUsers = allFetchedUsers.concat(Chunk.toArray(users));
      currentPage = nextPage;
    }

    return Chunk.fromIterable(allFetchedUsers);
  });

const program = fetchAllUsersManually().pipe(
  Effect.map((users) => users.length)
);

Effect.runPromise(program).then((totalUsers) => {
  console.log(`Total users fetched from all pages: ${totalUsers}`);
});
```

This manual approach is inferior because it forces you to manage state explicitly (`allFetchedUsers`, `currentPage`). The logic is contained within a single, monolithic effect that is not lazy and cannot be easily composed with other stream operators without first collecting all results. `Stream.paginateEffect` abstracts away this entire block of boilerplate code.

**Rationale:**

To handle a data source that is split across multiple pages, use `Stream.paginateEffect` to abstract the pagination logic into a single, continuous `Stream`.

---

Calling paginated APIs is a classic programming challenge. It often involves writing complex, stateful, and imperative code with manual loops to fetch one page, check if there's a next page, fetch that page, and so on, all while accumulating the results. This logic is tedious to write and easy to get wrong.

`Stream.paginateEffect` elegantly solves this by declaratively modeling the pagination process:

1. **Declarative and Stateless**: You provide a function that knows how to fetch a single page, and the `Stream` handles the looping, state management (the current page token/number), and termination logic for you. Your business logic remains clean and stateless.
2. **Lazy and Efficient**: The stream fetches pages on demand as they are consumed. If a downstream consumer only needs the first 20 items, the stream will only make enough API calls to satisfy that need, rather than wastefully fetching all pages upfront. (A sketch follows this list.)
3. **Fully Composable**: The result is a standard `Stream`. This means you can pipe the continuous flow of items directly into other powerful operators like `mapEffect` for concurrent processing or `grouped` for batching, without ever thinking about page boundaries again.
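
As a sketch of point 2: taking only the first 20 users from the `userStream` defined above should trigger requests for pages 1 and 2 only.

```typescript
// Only two pages are fetched, because 20 items satisfy the demand
const firstTwenty = userStream.pipe(Stream.take(20), Stream.runCollect);
```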

---

---

### Process Items Concurrently

**Rule:** Use Stream.mapEffect with the `concurrency` option to process stream items in parallel.

**Good Example:**

This example processes four items, each taking one second. By setting `concurrency: 2`, the total runtime is approximately two seconds instead of four, because items are processed in parallel pairs.

```typescript
import { Duration, Effect, Stream } from "effect";

// A mock function that simulates a slow I/O operation
const processItem = (id: number): Effect.Effect<string, Error> =>
  Effect.log(`Starting item ${id}...`).pipe(
    Effect.delay("1 second"),
    Effect.map(() => `Finished item ${id}`),
    Effect.tap(Effect.log)
  );

const ids = [1, 2, 3, 4];

const program = Stream.fromIterable(ids).pipe(
  // Process up to 2 items concurrently
  Stream.mapEffect(processItem, { concurrency: 2 }),
  Stream.runDrain
);

// Measure the total time taken
const timedProgram = Effect.timed(program);

const programWithLogging = Effect.gen(function* () {
  const [duration, _] = yield* timedProgram;
  const durationMs = Duration.toMillis(duration);
  yield* Effect.log(`\nTotal time: ${Math.round(durationMs / 1000)} seconds`);
  return duration;
}).pipe(
  Effect.catchAll((error) =>
    Effect.gen(function* () {
      yield* Effect.logError(`Program error: ${error}`);
      return null;
    })
  )
);

Effect.runPromise(programWithLogging);
/*
Output:
... level=INFO msg="Starting item 1..."
... level=INFO msg="Starting item 2..."
... level=INFO msg="Finished item 1"
... level=INFO msg="Starting item 3..."
... level=INFO msg="Finished item 2"
... level=INFO msg="Starting item 4..."
... level=INFO msg="Finished item 3"
... level=INFO msg="Finished item 4"

Total time: 2 seconds
*/
```

**Anti-Pattern:**

The anti-pattern is to process I/O-bound tasks sequentially. This is the default behavior of `Stream.mapEffect` if you don't specify a concurrency level, and it leads to poor performance.

```typescript
import { Duration, Effect, Stream } from "effect";
// ... same processItem function ...

const ids = [1, 2, 3, 4];

// Processing sequentially (default concurrency is 1)
const program = Stream.fromIterable(ids).pipe(
  Stream.mapEffect(processItem), // No concurrency option
  Stream.runDrain
);

const timedProgram = Effect.timed(program);

Effect.runPromise(timedProgram).then(([duration, _]) => {
  console.log(`\nTotal time: ${Math.round(Duration.toMillis(duration) / 1000)} seconds`);
});
/*
Output:
... level=INFO msg="Starting item 1..."
... level=INFO msg="Finished item 1"
... level=INFO msg="Starting item 2..."
... level=INFO msg="Finished item 2"
... etc.

Total time: 4 seconds
*/
```

While sequential processing is sometimes necessary to preserve order or avoid race conditions, it is a performance anti-pattern for independent, I/O-bound tasks. The concurrent approach is almost always preferable in such cases.

**Rationale:**

To process items in a stream concurrently, use `Stream.mapEffect` and provide a value greater than 1 to its `concurrency` option.

---

For many data pipelines, the most time-consuming step is performing an I/O-bound operation for each item, such as calling an API or querying a database. Processing these items one by one (sequentially) is safe but slow, as the entire pipeline waits for each operation to complete before starting the next.

`Stream.mapEffect`'s `concurrency` option is the solution. It provides a simple, declarative way to introduce controlled parallelism into your pipeline.

1. **Performance Boost**: It allows the stream to work on multiple items at once, drastically reducing the total execution time for I/O-bound tasks.
2. **Controlled Parallelism**: Unlike `Promise.all` which runs everything at once, you specify the _exact_ number of concurrent operations. This is crucial for stability, as it prevents your application from overwhelming downstream services or exhausting its own resources (like file handles or network sockets).
3. **Automatic Backpressure**: The stream will not pull new items from the source faster than the concurrent slots can process them. This backpressure is handled automatically, preventing memory issues.
4. **Structured Concurrency**: It's fully integrated with Effect's runtime. If any concurrent operation fails, all other in-flight operations for that stream are immediately and reliably interrupted, preventing wasted work and ensuring clean shutdowns.

---

---

### Process Items in Batches

**Rule:** Use Stream.grouped(n) to transform a stream of items into a stream of batched chunks.

**Good Example:**

This example processes 10 users. By using `Stream.grouped(5)`, it transforms the stream of 10 individual users into a stream of two chunks (each a batch of 5). The `saveUsersInBulk` function is then called only twice, once for each batch.

```typescript
import { Effect, Stream, Chunk } from "effect";

// A mock function that simulates a bulk database insert
const saveUsersInBulk = (
  userBatch: Chunk.Chunk<{ id: number }>
): Effect.Effect<void, Error> =>
  Effect.log(
    `Saving batch of ${userBatch.length} users: ${Chunk.toArray(userBatch)
      .map((u) => u.id)
      .join(", ")}`
  );

const userIds = Array.from({ length: 10 }, (_, i) => ({ id: i + 1 }));

const program = Stream.fromIterable(userIds).pipe(
  // Group the stream of users into batches of 5
  Stream.grouped(5),
  // Process each batch with our bulk save function
  Stream.mapEffect(saveUsersInBulk, { concurrency: 1 }),
  Stream.runDrain
);

Effect.runPromise(program);
/*
Output:
... level=INFO msg="Saving batch of 5 users: 1, 2, 3, 4, 5"
... level=INFO msg="Saving batch of 5 users: 6, 7, 8, 9, 10"
*/
```

**Anti-Pattern:**

The anti-pattern is to process items one by one when a more efficient bulk operation is available. This is a common performance bottleneck.

```typescript
import { Effect, Stream } from "effect";

// A mock function that saves one user at a time
const saveUser = (user: { id: number }): Effect.Effect<void, Error> =>
  Effect.log(`Saving single user: ${user.id}`);

const userIds = Array.from({ length: 10 }, (_, i) => ({ id: i + 1 }));

const program = Stream.fromIterable(userIds).pipe(
  // Process each user individually, leading to 10 separate "saves"
  Stream.mapEffect(saveUser, { concurrency: 1 }),
  Stream.runDrain
);

Effect.runPromise(program);
/*
Output:
... level=INFO msg="Saving single user: 1"
... level=INFO msg="Saving single user: 2"
... (and so on for all 10 users)
*/
```

This individual processing approach is an anti-pattern because it creates unnecessary overhead. If each `saveUser` call took 50ms of network latency, the total time would be over 500ms. The batched approach might only take 100ms (2 batches \* 50ms), resulting in a 5x performance improvement.

**Rationale:**

To process items in fixed-size batches for performance, use the `Stream.grouped(batchSize)` operator to transform a stream of individual items into a stream of `Chunk`s.

---

When interacting with external systems like databases or APIs, making one request per item is often incredibly inefficient. The network latency and overhead of each individual call can dominate the total processing time. Most high-performance systems offer bulk or batch endpoints to mitigate this.

`Stream.grouped(n)` provides a simple, declarative way to prepare your data for these bulk operations:

1. **Performance Optimization**: It dramatically reduces the number of network roundtrips. A single API call with 100 items is far faster than 100 individual API calls.
2. **Declarative Batching**: It abstracts away the tedious and error-prone manual logic of counting items, managing temporary buffers, and deciding when to send a batch.
3. **Seamless Composition**: It transforms a `Stream<A>` into a `Stream<Chunk<A>>`. This new stream of chunks can be piped directly into `Stream.mapEffect`, allowing you to process each batch concurrently.
4. **Handles Leftovers**: The operator automatically handles the final, smaller batch if the total number of items is not perfectly divisible by the batch size. (A sketch follows this list.)
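
For instance, a minimal sketch: grouping 12 items into batches of 5 emits chunks of sizes 5, 5, and 2, with the final partial batch included automatically.

```typescript
import { Chunk, Effect, Stream } from "effect";

const batchSizes = Stream.range(1, 12).pipe(
  Stream.grouped(5),
  // Inspect only the size of each emitted batch
  Stream.map(Chunk.size),
  Stream.runCollect
);

Effect.runPromise(batchSizes).then((sizes) =>
  console.log(Chunk.toArray(sizes)) // [5, 5, 2]
);
```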

---

---

### Merge Multiple Streams

**Rule:** Use merge, concat, or zip to combine multiple streams based on your requirements.

**Good Example:**

```typescript
import { Effect, Stream, Duration, Chunk } from "effect"

// ============================================
// 1. Merge - interleave as items arrive
// ============================================

const mergeExample = Effect.gen(function* () {
  // Two streams producing at different rates
  const fast = Stream.fromIterable(["A1", "A2", "A3"]).pipe(
    Stream.tap(() => Effect.sleep("100 millis"))
  )

  const slow = Stream.fromIterable(["B1", "B2", "B3"]).pipe(
    Stream.tap(() => Effect.sleep("200 millis"))
  )

  // Merge interleaves based on arrival time
  const merged = Stream.merge(fast, slow)

  yield* merged.pipe(
    Stream.tap((item) => Effect.log(`Received: ${item}`)),
    Stream.runDrain
  )
  // Output order depends on timing: A1, B1, A2, A3, B2, B3 (approximately)
})

// ============================================
// 2. Merge all - combine many streams
// ============================================

const mergeAllExample = Effect.gen(function* () {
  const streams = [
    Stream.fromIterable([1, 2, 3]),
    Stream.fromIterable([10, 20, 30]),
    Stream.fromIterable([100, 200, 300]),
  ]

  const merged = Stream.mergeAll(streams, { concurrency: 3 })

  const results = yield* merged.pipe(Stream.runCollect)
  yield* Effect.log(`Merged: ${Chunk.toReadonlyArray(results)}`)
})

// ============================================
// 3. Concat - sequence streams
// ============================================

const concatExample = Effect.gen(function* () {
  const first = Stream.fromIterable([1, 2, 3])
  const second = Stream.fromIterable([4, 5, 6])
  const third = Stream.fromIterable([7, 8, 9])

  // Concat waits for each stream to complete
  const sequential = Stream.concat(Stream.concat(first, second), third)

  const results = yield* sequential.pipe(Stream.runCollect)
  yield* Effect.log(`Concatenated: ${Chunk.toReadonlyArray(results)}`)
  // Always: [1, 2, 3, 4, 5, 6, 7, 8, 9]
})

// ============================================
// 4. Zip - pair items from streams
// ============================================

const zipExample = Effect.gen(function* () {
  const names = Stream.fromIterable(["Alice", "Bob", "Charlie"])
  const ages = Stream.fromIterable([30, 25, 35])

  // Zip pairs items by position
  const zipped = Stream.zip(names, ages)

  yield* zipped.pipe(
    Stream.tap(([name, age]) => Effect.log(`${name} is ${age} years old`)),
    Stream.runDrain
  )
})

// ============================================
// 5. ZipWith - pair and transform
// ============================================

const zipWithExample = Effect.gen(function* () {
  const prices = Stream.fromIterable([100, 200, 150])
  const quantities = Stream.fromIterable([2, 1, 3])

  // Zip and calculate total
  const totals = Stream.zipWith(prices, quantities, (price, qty) => ({
    price,
    quantity: qty,
    total: price * qty,
  }))

  yield* totals.pipe(
    Stream.tap((item) => Effect.log(`${item.quantity}x @ $${item.price} = $${item.total}`)),
    Stream.runDrain
  )
})

// ============================================
// 6. ZipLatest - combine with latest values
// ============================================

const zipLatestExample = Effect.gen(function* () {
  // Simulate different update rates
  const temperature = Stream.fromIterable([20, 21, 22, 23]).pipe(
    Stream.tap(() => Effect.sleep("100 millis"))
  )

  const humidity = Stream.fromIterable([50, 55, 60]).pipe(
    Stream.tap(() => Effect.sleep("150 millis"))
  )

  // ZipLatest always uses the latest value from each stream
  const combined = Stream.zipLatest(temperature, humidity)

  yield* combined.pipe(
    Stream.tap(([temp, hum]) => Effect.log(`Temp: ${temp}°C, Humidity: ${hum}%`)),
    Stream.runDrain
  )
})

// ============================================
// 7. Practical example: Merge event sources
// ============================================

interface Event {
  source: string
  type: string
  data: unknown
}

const mergeEventSources = Effect.gen(function* () {
  // Simulate multiple event sources
  const mouseEvents = Stream.fromIterable([
    { source: "mouse", type: "click", data: { x: 100, y: 200 } },
    { source: "mouse", type: "move", data: { x: 150, y: 250 } },
  ] as Event[])

  const keyboardEvents = Stream.fromIterable([
    { source: "keyboard", type: "keydown", data: { key: "Enter" } },
    { source: "keyboard", type: "keyup", data: { key: "Enter" } },
  ] as Event[])

  const networkEvents = Stream.fromIterable([
    { source: "network", type: "response", data: { status: 200 } },
  ] as Event[])

  // Merge all event sources
  const allEvents = Stream.mergeAll([mouseEvents, keyboardEvents, networkEvents], { concurrency: 3 })

  yield* allEvents.pipe(
    Stream.tap((event) =>
      Effect.log(`[${event.source}] ${event.type}: ${JSON.stringify(event.data)}`)
    ),
    Stream.runDrain
  )
})

// ============================================
// 8. Run examples
// ============================================

const program = Effect.gen(function* () {
  yield* Effect.log("=== Merge Example ===")
  yield* mergeExample

  yield* Effect.log("\n=== Concat Example ===")
  yield* concatExample

  yield* Effect.log("\n=== Zip Example ===")
  yield* zipExample
})

Effect.runPromise(program)
```

**Rationale:**

Choose the right combination strategy: merge for interleaving, concat for sequencing, or zip for pairing items.

---

Merging streams enables:

1. **Aggregation** - Combine data from multiple sources
2. **Correlation** - Match related data
3. **Multiplexing** - Single consumer for multiple producers
4. **Comparison** - Process streams side by side

---

---

### Process collections of data asynchronously

**Rule:** Leverage Stream to process collections effectfully with built-in concurrency control and resource safety.

**Good Example:**

This example processes a list of IDs by fetching user data for each one. `Stream.mapEffect` is used to apply an effectful function (`getUserById`) to each element, with concurrency limited to 2 simultaneous requests.

```typescript
import { Effect, Stream, Chunk } from "effect";

// A mock function that simulates fetching a user from a database
const getUserById = (
  id: number
): Effect.Effect<{ id: number; name: string }, Error> =>
  Effect.succeed({ id, name: `User ${id}` }).pipe(
    Effect.delay("100 millis"),
    Effect.tap(() => Effect.log(`Fetched user ${id}`))
  );

// The stream-based program
const program = Stream.fromIterable([1, 2, 3, 4, 5]).pipe(
  // Process each item with an Effect, limiting concurrency to 2
  Stream.mapEffect(getUserById, { concurrency: 2 }),
  // Run the stream and collect all results into a Chunk
  Stream.runCollect
);

const programWithLogging = Effect.gen(function* () {
  const users = yield* program;
  yield* Effect.log(
    `All users fetched: ${JSON.stringify(Chunk.toArray(users))}`
  );
  return users;
});

Effect.runPromise(programWithLogging);
```

**Anti-Pattern:**

A common but flawed approach is to use `Promise.all` to handle multiple asynchronous operations. This method lacks the safety, control, and composability inherent to Effect's `Stream`.

```typescript
// A mock function that returns a Promise
const getUserByIdAsPromise = (
  id: number
): Promise<{ id: number; name: string }> =>
  new Promise((resolve) => {
    setTimeout(() => {
      console.log(`Fetched user ${id}`);
      resolve({ id, name: `User ${id}` });
    }, 100);
  });

// The Promise-based program
const ids = [1, 2, 3, 4, 5];
const promises = ids.map(getUserByIdAsPromise);

Promise.all(promises).then((users) => {
  console.log("All users fetched:", users);
});
```

This anti-pattern is problematic because it immediately executes all promises in parallel with no concurrency limit, it does not benefit from Effect's structured concurrency for safe interruption, and it breaks out of the Effect context, losing composability with features like logging, retries, and dependency management.

**Rationale:**

For processing collections that involve asynchronous or effectful operations, use `Stream` to ensure resource safety, control concurrency, and maintain composability.

---

`Stream` is a fundamental data type in Effect for handling collections of data, especially in asynchronous contexts. Unlike a simple array, a `Stream` is lazy and pull-based, meaning it only computes or fetches elements as they are needed, making it highly efficient for large or infinite datasets.

The primary benefits of using `Stream` are:

1. **Concurrency Control**: `Stream` provides powerful and simple operators like `mapEffect` that have built-in concurrency management. This prevents overwhelming downstream services with too many parallel requests.
2. **Resource Safety**: `Stream` is built on `Scope`, ensuring that any resources opened during the stream's operation (like file handles or network connections) are safely and reliably closed, even in the case of errors or interruption.
3. **Composability**: Streams are highly composable. They can be filtered, mapped, transformed, and combined with other Effect data types seamlessly, allowing you to build complex data processing pipelines that remain readable and type-safe.
4. **Resilience**: `Stream` integrates with `Schedule` to provide sophisticated retry and repeat logic, and with Effect's structured concurrency to ensure that failures in one part of a pipeline lead to a clean and predictable shutdown of the entire process.

---

---

### Process a Large File with Constant Memory

**Rule:** Use Stream.fromReadable with a Node.js Readable stream to process files efficiently.

**Good Example:**
This example demonstrates reading a text file, splitting it into individual lines, and processing each line. The combination of a chunked file stream, `Stream.decodeText`, and `Stream.splitLines` is a powerful and common pattern for handling text-based files. (Here the chunked source comes from the platform `FileSystem` service; a sketch of the raw Node.js `Readable` bridge follows the rationale below.)

```typescript
import { FileSystem } from "@effect/platform";
import { NodeFileSystem } from "@effect/platform-node";
import type { PlatformError } from "@effect/platform/Error";
import { Effect, Stream } from "effect";
import * as path from "node:path";

const processFile = (
  filePath: string,
  content: string
): Effect.Effect<void, PlatformError, FileSystem.FileSystem> =>
  Effect.gen(function* () {
    const fs = yield* FileSystem.FileSystem;

    // Write content to file
    yield* fs.writeFileString(filePath, content);

    // Create a STREAMING pipeline - reads file in chunks, not all at once
    const fileStream = fs.stream(filePath).pipe(
      // Decode bytes to text
      Stream.decodeText("utf-8"),
      // Split into lines
      Stream.splitLines,
      // Process each line
      Stream.tap((line) => Effect.log(`Processing: ${line}`))
    );

    // Run the stream to completion
    yield* Stream.runDrain(fileStream);

    // Clean up file
    yield* fs.remove(filePath);
  });

const program = Effect.gen(function* () {
  const filePath = path.join(__dirname, "large-file.txt");

  yield* processFile(filePath, "line 1\nline 2\nline 3").pipe(
    Effect.catchAll((error: PlatformError) =>
      Effect.logError(`Error processing file: ${error.message}`)
    )
  );
});

Effect.runPromise(program.pipe(Effect.provide(NodeFileSystem.layer)));

/*
Output:
... level=INFO msg="Processing: line 1"
... level=INFO msg="Processing: line 2"
... level=INFO msg="Processing: line 3"
*/
```

**Anti-Pattern:**

The anti-pattern is to use synchronous, memory-intensive functions like `fs.readFileSync`. This approach is simple for tiny files but fails catastrophically for large ones.

```typescript
import * as fs from "node:fs";
import * as path from "node:path";

const filePath = path.join(__dirname, "large-file.txt");
// Create a dummy file for the example
fs.writeFileSync(filePath, "line 1\nline 2\nline 3");

try {
  // Anti-pattern: This loads the ENTIRE file into memory as a single buffer.
  const fileContent = fs.readFileSync(filePath, "utf-8");
  const lines = fileContent.split("\n");

  for (const line of lines) {
    console.log(`Processing: ${line}`);
  }
} catch (err) {
  console.error("Failed to read file:", err);
} finally {
  // Clean up the dummy file
  fs.unlinkSync(filePath);
}
```

This is a dangerous anti-pattern because:

1. **It's a Memory Bomb**: If `large-file.txt` were 2GB and your server had 1GB of RAM, this code would immediately crash the process.
2. **It Blocks the Event Loop**: `readFileSync` is a synchronous, blocking operation. While it's reading the file from disk, your entire application is frozen and cannot respond to any other requests.
3. **It's Not Composable**: You get a giant string that must be processed eagerly. You lose all the benefits of lazy processing, concurrency control, and integrated error handling that `Stream` provides.

**Rationale:**

To process a large file without consuming excessive memory, create a Node.js `Readable` stream from the file and convert it into an Effect `Stream` using `Stream.fromReadable`.

---

The most significant advantage of a streaming architecture is its ability to handle datasets far larger than available RAM. When you need to process a multi-gigabyte log file or CSV, loading it all into memory is not an option—it will crash your application.

The `Stream.fromReadable` constructor provides a bridge from Node.js's built-in file streaming capabilities to the Effect ecosystem. This approach is superior because:

1. **Constant Memory Usage**: The file is read in small, manageable chunks. Your application's memory usage remains low and constant, regardless of whether the file is 1 megabyte or 100 gigabytes.
2. **Composability**: Once the file is represented as an Effect `Stream`, you can apply the full suite of powerful operators to it: `mapEffect` for concurrent processing, `filter` for selectively choosing lines, `grouped` for batching, and `retry` for resilience.
3. **Resource Safety**: Effect's `Stream` is built on `Scope`, which guarantees that the underlying file handle will be closed automatically when the stream finishes, fails, or is interrupted. This prevents resource leaks, a common problem in manual file handling.
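
As a minimal sketch of that bridge (assuming the `NodeStream.fromReadable` constructor exported by `@effect/platform-node`, and a local `data.txt` file), the same line-processing pipeline can be fed from a raw Node.js `Readable`:

```typescript
import { NodeStream } from "@effect/platform-node";
import { Effect, Stream } from "effect";
import * as fs from "node:fs";

// Bridge a Node.js Readable into an Effect Stream of byte chunks
const lines = NodeStream.fromReadable<Error, Uint8Array>(
  () => fs.createReadStream("data.txt"),
  (error) => new Error(`Read failed: ${String(error)}`)
).pipe(Stream.decodeText("utf-8"), Stream.splitLines);

const program = lines.pipe(
  Stream.tap((line) => Effect.log(`Processing: ${line}`)),
  Stream.runDrain
);

Effect.runPromise(program);
```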

---

---

### Automatically Retry Failed Operations

**Rule:** Compose a Stream with the .retry(Schedule) operator to automatically recover from transient failures.

**Good Example:**
This example simulates a flaky API call that fails randomly. The program processes a list of IDs, and the retry policy ensures that a failed attempt for `id: 2` is automatically retried until it succeeds (or the policy's three attempts are exhausted).

```typescript
import { Effect, Stream, Schedule } from "effect";

// A mock function that simulates a flaky API call
const processItem = (id: number): Effect.Effect<string, Error> =>
  Effect.gen(function* () {
    yield* Effect.log(`Attempting to process item ${id}...`);

    // Item 2 fails randomly, to demonstrate the retry behavior
    if (id === 2) {
      const random = Math.random();
      if (random < 0.5) {
        // 50% chance of failure for demonstration
        yield* Effect.log(`Item ${id} failed, will retry...`);
        return yield* Effect.fail(new Error("API is temporarily down"));
      }
    }

    yield* Effect.log(`✅ Successfully processed item ${id}`);
    return `Processed item ${id}`;
  });

const ids = [1, 2, 3];

// Define a retry policy: 3 attempts with a fixed 100ms delay
const retryPolicy = Schedule.recurs(3).pipe(
  Schedule.addDelay(() => "100 millis")
);

const program = Effect.gen(function* () {
  yield* Effect.log("=== Stream Retry on Failure Demo ===");
  yield* Effect.log(
    "Processing items with retry policy (3 attempts, 100ms delay)"
  );

  // Process each item individually with retry
  const results = yield* Effect.forEach(
    ids,
    (id) =>
      processItem(id).pipe(
        Effect.retry(retryPolicy),
        Effect.catchAll((error) =>
          Effect.gen(function* () {
            yield* Effect.log(
              `❌ Item ${id} failed after all retries: ${error.message}`
            );
            return `Failed: item ${id}`;
          })
        )
      ),
    { concurrency: 1 }
  );

  yield* Effect.log("=== Results ===");
  for (let index = 0; index < results.length; index++) {
    yield* Effect.log(`Item ${ids[index]}: ${results[index]}`);
  }

  yield* Effect.log("✅ Stream processing completed");
});

Effect.runPromise(program).catch((error) => {
  Effect.runSync(Effect.logError("Unexpected error: " + error));
});
/*
Output:
1083
+ Output:
1084
+ ... level=INFO msg="Attempting to process item 1..."
1085
+ ... level=INFO msg="Attempting to process item 2..."
1086
+ ... level=INFO msg="Item 2 failed, attempt 1."
1087
+ ... level=INFO msg="Attempting to process item 2..."
1088
+ ... level=INFO msg="Item 2 failed, attempt 2."
1089
+ ... level=INFO msg="Attempting to process item 2..."
1090
+ ... level=INFO msg="Attempting to process item 3..."
1091
+ */
1092
+ ```
+
+ **Anti-Pattern:**
+
+ The anti-pattern is to either have no retry logic at all, or to write manual, imperative retry loops inside your processing function.
+
+ ```typescript
+ import { Effect, Stream } from "effect";
+ // ... same mock processItem function ...
+
+ const ids = [1, 2, 3];
+
+ const program = Stream.fromIterable(ids).pipe(
+   // No retry logic. The entire stream will fail when item 2 fails.
+   Stream.mapEffect(processItem, { concurrency: 1 }),
+   Stream.runDrain
+ );
+
+ Effect.runPromise(program).catch((error) => {
+   console.error("Pipeline failed:", error);
+ });
+ /*
+ Output:
+ ... level=INFO msg="Attempting to process item 1..."
+ ... level=INFO msg="Attempting to process item 2..."
+ ... level=INFO msg="Item 2 failed, will retry..."
+ Pipeline failed: Error: API is temporarily down
+ */
+ ```
+
+ This "fail-fast" approach is brittle. A single, temporary network blip would terminate the entire pipeline, even if subsequent items could have been processed successfully. Manual retry logic inside `processItem` is possible, but it pollutes the core logic with concerns about timing and attempt counting, and it is far less composable and reusable than a `Schedule`.
+
+ **Rationale:**
+
+ To make a data pipeline resilient to transient failures, apply a retry policy (a `Schedule`) with `Effect.retry` to the effect that processes each item, or with `Stream.retry` to the stream as a whole.
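+
+ For contrast with the per-item `Effect.retry` used above, here is a minimal, illustrative sketch of `Stream.retry`, which restarts the failed stream itself according to a schedule (the `flakySource` below is a stand-in for any stream that can fail):
+
+ ```typescript
+ import { Effect, Schedule, Stream } from "effect";
+
+ // A source that fails on its first two attempts, then succeeds
+ let attempts = 0;
+ const flakySource = Stream.fromEffect(
+   Effect.suspend(() => {
+     attempts += 1;
+     return attempts < 3
+       ? Effect.fail(new Error(`attempt ${attempts} failed`))
+       : Effect.succeed("payload");
+   })
+ );
+
+ // Restart the failed stream up to 3 times, waiting 100ms between attempts
+ const resilient = flakySource.pipe(
+   Stream.retry(Schedule.recurs(3).pipe(Schedule.addDelay(() => "100 millis")))
+ );
+
+ Effect.runPromise(Stream.runCollect(resilient)).then(console.log);
+ ```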
+
+ ---
+
+
+ Real-world systems are unreliable. Network connections drop, APIs return temporary `503` errors, and databases can experience deadlocks. A naive pipeline fails completely at the first sign of trouble. A resilient pipeline, however, can absorb these transient errors and heal itself.
+
+ The `retry` operator, combined with the `Schedule` module, provides a powerful and declarative way to build this resilience:
+
+ 1. **Declarative Resilience**: Instead of writing complex `try/catch` loops with manual delay logic, you declaratively state _how_ the pipeline should retry. For example, "retry 3 times, with an exponential backoff starting at 100ms."
+ 2. **Separation of Concerns**: Your core pipeline logic remains focused on the "happy path." The retry strategy is a separate, composable concern that you apply to the entire stream.
+ 3. **Rich Scheduling Policies**: `Schedule` is incredibly powerful. You can create schedules based on a fixed number of retries, exponential backoff, jitter (to avoid thundering-herd problems), or combinations of these (see the sketch after this list).
+ 4. **Prevents Cascading Failures**: By handling temporary issues at the source, you prevent a small, transient glitch from causing a complete failure of your entire application.
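+
+ A minimal sketch of composing such a policy (these are standard `Schedule` combinators; the specific values are illustrative):
+
+ ```typescript
+ import { Schedule } from "effect";
+
+ // Exponential backoff starting at 100ms, with jitter applied to each delay,
+ // stopping after at most 5 retries
+ const backoffWithJitter = Schedule.exponential("100 millis").pipe(
+   Schedule.jittered,
+   Schedule.intersect(Schedule.recurs(5))
+ );
+
+ // Usable anywhere a Schedule is expected,
+ // e.g. Effect.retry(effect, backoffWithJitter)
+ ```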
+
+ ---
+
+ ---
+
+
+ ## 🟠 Advanced Patterns
+
+ ### Fan Out to Multiple Consumers
+
+ **Rule:** Use broadcast or partition to send stream data to multiple consumers.
+
+ **Good Example:**
+
+ ```typescript
+ import { Chunk, Effect, GroupBy, Queue, Stream } from "effect"
+
+ // ============================================
+ // 1. Broadcast to all consumers
+ // ============================================
+
+ const broadcastExample = Effect.scoped(
+   Effect.gen(function* () {
+     const source = Stream.fromIterable([1, 2, 3, 4, 5])
+
+     // Broadcast to 3 consumers - each gets all items
+     // (the third argument caps how far consumers may lag behind each other)
+     const [stream1, stream2, stream3] = yield* Stream.broadcast(source, 3, 16)
+
+     // Consumer 1: Log items
+     const consumer1 = stream1.pipe(
+       Stream.tap((n) => Effect.log(`Consumer 1: ${n}`)),
+       Stream.runDrain
+     )
+
+     // Consumer 2: Sum items
+     const consumer2 = stream2.pipe(
+       Stream.runFold(0, (acc, n) => acc + n),
+       Effect.tap((sum) => Effect.log(`Consumer 2 sum: ${sum}`))
+     )
+
+     // Consumer 3: Collect to array
+     const consumer3 = stream3.pipe(
+       Stream.runCollect,
+       Effect.tap((items) =>
+         Effect.log(`Consumer 3 collected: ${Chunk.toReadonlyArray(items)}`)
+       )
+     )
+
+     // Run all consumers in parallel
+     yield* Effect.all([consumer1, consumer2, consumer3], { concurrency: 3 })
+   })
+ )
+
+ // ============================================
+ // 2. Partition by predicate
+ // ============================================
+
+ // Stream.partition requires a Scope, so the example is wrapped in Effect.scoped
+ const partitionExample = Effect.scoped(
+   Effect.gen(function* () {
+     const numbers = Stream.fromIterable([1, 2, 3, 4, 5, 6, 7, 8, 9, 10])
+
+     // Partition by "is even" - the resulting tuple is [excluded, satisfying]
+     const [odds, evens] = yield* Stream.partition(numbers, (n) => n % 2 === 0)
+
+     const processEvens = evens.pipe(
+       Stream.tap((n) => Effect.log(`Even: ${n}`)),
+       Stream.runDrain
+     )
+
+     const processOdds = odds.pipe(
+       Stream.tap((n) => Effect.log(`Odd: ${n}`)),
+       Stream.runDrain
+     )
+
+     yield* Effect.all([processEvens, processOdds], { concurrency: 2 })
+   })
+ )
+
+ // ============================================
+ // 3. Partition into multiple buckets
+ // ============================================
+
+ interface Event {
+   type: "click" | "scroll" | "submit"
+   data: unknown
+ }
+
+ const multiPartitionExample = Effect.gen(function* () {
+   const events: Event[] = [
+     { type: "click", data: { x: 100 } },
+     { type: "scroll", data: { y: 200 } },
+     { type: "submit", data: { form: "login" } },
+     { type: "click", data: { x: 150 } },
+     { type: "scroll", data: { y: 300 } },
+   ]
+
+   const source = Stream.fromIterable(events)
+
+   // Group by type using groupByKey (this returns a GroupBy, not a Stream)
+   const grouped = source.pipe(
+     Stream.groupByKey((event) => event.type, {
+       bufferSize: 16,
+     })
+   )
+
+   // Process each group - a GroupBy is consumed with GroupBy.evaluate
+   yield* GroupBy.evaluate(grouped, (key, stream) =>
+     stream.pipe(
+       Stream.tap((event) =>
+         Effect.log(`[${key}] Processing: ${JSON.stringify(event.data)}`)
+       )
+     )
+   ).pipe(Stream.runDrain)
+ })
+
+ // ============================================
+ // 4. Fan-out with queues (manual control)
+ // ============================================
+
+ const queueFanOut = Effect.gen(function* () {
+   const source = Stream.fromIterable([1, 2, 3, 4, 5, 6, 7, 8, 9, 10])
+
+   // Create queues for each consumer
+   const queue1 = yield* Queue.unbounded<number>()
+   const queue2 = yield* Queue.unbounded<number>()
+   const queue3 = yield* Queue.unbounded<number>()
+
+   // Distribute items round-robin, then shut the queues down
+   const distributor = source.pipe(
+     Stream.zipWithIndex,
+     Stream.tap(([item, index]) => {
+       const queue = index % 3 === 0 ? queue1 : index % 3 === 1 ? queue2 : queue3
+       return Queue.offer(queue, item)
+     }),
+     Stream.runDrain,
+     Effect.tap(() =>
+       Effect.all([
+         Queue.shutdown(queue1),
+         Queue.shutdown(queue2),
+         Queue.shutdown(queue3),
+       ])
+     )
+   )
+
+   // Consumers - Stream.fromQueue ends when its queue is shut down
+   const makeConsumer = (name: string, queue: Queue.Queue<number>) =>
+     Stream.fromQueue(queue).pipe(
+       Stream.tap((n) => Effect.log(`${name}: ${n}`)),
+       Stream.runDrain
+     )
+
+   yield* Effect.all(
+     [
+       distributor,
+       makeConsumer("Worker 1", queue1),
+       makeConsumer("Worker 2", queue2),
+       makeConsumer("Worker 3", queue3),
+     ],
+     { concurrency: 4 }
+   )
+ })
+
+ // ============================================
+ // 5. Run examples
+ // ============================================
+
+ const program = Effect.gen(function* () {
+   yield* Effect.log("=== Broadcast Example ===")
+   yield* broadcastExample
+
+   yield* Effect.log("\n=== Partition Example ===")
+   yield* partitionExample
+ })
+
+ Effect.runPromise(program)
+ ```
+
+ **Rationale:**
+
+ Use `Stream.broadcast` to send every item to all consumers, or `Stream.partition` / `Stream.groupByKey` to route items to consumers by predicate or key.
+
+ ---
+
+
+ Fan-out enables parallel processing:
+
+ 1. **Throughput** - Multiple consumers process faster
+ 2. **Specialization** - Different consumers handle different data
+ 3. **Redundancy** - Multiple copies for reliability
+ 4. **Decoupling** - Consumers evolve independently
+
+ ---
+
+ ---
+
+ ### Manage Resources Safely in a Pipeline
+
+ **Rule:** Use Stream.acquireRelease to safely manage the lifecycle of a resource within a pipeline.
+
+ **Good Example:**
+
+
+ This example simulates processing a temporary file through a mock `FileService`. The cleanup step is registered as a finalizer with `Effect.addFinalizer` inside an `Effect.scoped` block - the same `Scope` mechanism that underlies `Stream.acquireRelease`. One of the lines is designed to fail partway through, and the logs demonstrate that the cleanup effect still runs, preventing any resource leaks.
+
+ ```typescript
+ import { Effect } from "effect";
+ import * as path from "node:path";
+
+ interface ProcessError {
+   readonly _tag: "ProcessError";
+   readonly message: string;
+ }
+
+ const ProcessError = (message: string): ProcessError => ({
+   _tag: "ProcessError",
+   message,
+ });
+
+ // Mock file service - stands in for a real file-system dependency
+ export class FileService extends Effect.Service<FileService>()("FileService", {
+   sync: () => {
+     const filePath = path.join(__dirname, "temp-resource.txt");
+     return {
+       createTempFile: () => Effect.succeed({ filePath }),
+       cleanup: (_filePath: string) =>
+         Effect.log("✅ Resource cleaned up successfully"),
+       readFile: (_filePath: string) =>
+         Effect.succeed("data 1\ndata 2\nFAIL\ndata 4"),
+     };
+   },
+ }) {}
+
+ // Process a single line
+ const processLine = (line: string): Effect.Effect<void, ProcessError> =>
+   line === "FAIL"
+     ? Effect.fail(ProcessError("Failed to process line"))
+     : Effect.log(`Processed: ${line}`);
+
+ // Create and process the file with proper resource management
+ const program = Effect.gen(function* () {
+   yield* Effect.log("=== Stream Resource Management Demo ===");
+   yield* Effect.log(
+     "This demonstrates proper resource cleanup even when errors occur"
+   );
+
+   const fileService = yield* FileService;
+   const { filePath } = yield* fileService.createTempFile();
+
+   // Use a scope to ensure cleanup happens even on failure
+   yield* Effect.scoped(
+     Effect.gen(function* () {
+       yield* Effect.addFinalizer(() => fileService.cleanup(filePath));
+
+       const content = yield* fileService.readFile(filePath);
+       const lines = content.split("\n");
+
+       // Process each line, continuing even if some fail
+       for (const line of lines) {
+         yield* processLine(line).pipe(
+           Effect.catchAll((error) =>
+             Effect.log(`⚠️ Skipped line due to error: ${error.message}`)
+           )
+         );
+       }
+
+       yield* Effect.log(
+         "✅ Processing completed with proper resource management"
+       );
+     })
+   );
+ });
+
+ // Run the program with the FileService layer
+ Effect.runPromise(Effect.provide(program, FileService.Default)).catch(
+   (error) => {
+     Effect.runSync(Effect.logError("Unexpected error: " + error));
+   }
+ );
+ ```
+
+ **Anti-Pattern:**
+
+ The anti-pattern is to manage resources manually outside the stream's context. This is brittle and almost always leads to resource leaks when errors occur.
+
+ ```typescript
+ import { Effect, Stream } from "effect";
+ import * as fs from "node:fs";
+ import * as path from "node:path";
+
+ const filePath = path.join(__dirname, "temp-resource-bad.txt");
+
+ const program = Effect.gen(function* () {
+   // 1. Resource acquired manually before the stream
+   yield* Effect.sync(() => fs.writeFileSync(filePath, "data 1\ndata 2"));
+   yield* Effect.log("Resource acquired manually.");
+
+   const stream = Stream.fromIterable(
+     fs.readFileSync(filePath, "utf-8").split("\n")
+   ).pipe(
+     // This stream will fail, causing the run to reject.
+     Stream.map(() => {
+       throw new Error("Something went wrong!");
+     })
+   );
+
+   // 2. Stream is executed
+   yield* Stream.runDrain(stream);
+
+   // 3. This release logic is NEVER reached if the stream fails.
+   yield* Effect.sync(() => fs.unlinkSync(filePath));
+   yield* Effect.log("Resource released manually. (This will not be logged)");
+ });
+
+ Effect.runPromiseExit(program).then((exit) => {
+   if (exit._tag === "Failure") {
+     console.log("\nPipeline failed. The temp file was NOT deleted.");
+   }
+ });
+ ```
+
+ In this anti-pattern, the `fs.unlinkSync` cleanup call is unreachable because the `Stream.runDrain` effect fails, causing the `gen` block to terminate immediately. The temporary file is leaked onto the disk. `Stream.acquireRelease` solves this problem entirely.
+
+ **Rationale:**
+
+ To safely manage a resource that has an open/close lifecycle (like a file handle or database connection) for the duration of a stream, use the `Stream.acquireRelease` constructor.
+
+ ---
+
+
+ What happens if a pipeline processing a file fails halfway through? In a naive implementation, the file handle might be left open, leading to a resource leak. Over time, these leaks can exhaust system resources and crash your application.
+
+ `Stream.acquireRelease` is Effect's robust solution to this problem. It's built on `Scope`, Effect's fundamental resource-management tool (a minimal sketch follows the list below).
+
+ 1. **Guaranteed Cleanup**: You provide an `acquire` effect to open the resource and a `release` effect to close it. Effect guarantees that the `release` effect will be called when the stream terminates, for _any_ reason: successful completion, a processing failure, or even external interruption.
+ 2. **Declarative and Co-located**: The logic for a resource's entire lifecycle - acquisition, usage (the stream itself), and release - is defined in one place. This makes the code easier to understand and reason about compared to manual `try/finally` blocks.
+ 3. **Prevents Resource Leaks**: It is the idiomatic way to build truly resilient pipelines that do not leak resources, which is essential for long-running, production-grade applications.
+ 4. **Composability**: The resulting stream is just a normal `Stream`, which can be composed with any other stream operators.
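+
+ A minimal sketch of `Stream.acquireRelease` itself (the `openHandle`/`closeHandle` pair is illustrative, standing in for any real open/close resource):
+
+ ```typescript
+ import { Effect, Stream } from "effect";
+
+ // Acquire: "open" a handle; Release: always runs when the stream ends
+ const openHandle = Effect.log("acquire: handle opened").pipe(
+   Effect.as({ lines: ["data 1", "data 2", "data 3"] })
+ );
+ const closeHandle = (_handle: { lines: string[] }) =>
+   Effect.log("release: handle closed (runs on success, failure, or interrupt)");
+
+ const lines = Stream.acquireRelease(openHandle, closeHandle).pipe(
+   // The acquired resource is available to downstream operators for the
+   // lifetime of the stream
+   Stream.flatMap((handle) => Stream.fromIterable(handle.lines)),
+   Stream.tap((line) => Effect.log(`Processed: ${line}`))
+ );
+
+ Effect.runPromise(Stream.runDrain(lines));
+ ```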
+
+ ---
+
+ ---
+
+ ### Implement Backpressure in Pipelines
+
+ **Rule:** Use buffering and throttling to handle producers faster than consumers.
+
+ **Good Example:**
+
+ ```typescript
+ import { Chunk, Effect, Queue, Stream } from "effect"
+
+ // ============================================
+ // 1. Stream with natural backpressure
+ // ============================================
+
+ // Streams have built-in backpressure - consumers pull data
+ const fastProducer = Stream.fromIterable(
+   Array.from({ length: 1000 }, (_, i) => i)
+ )
+
+ const slowConsumer = fastProducer.pipe(
+   Stream.tap((n) =>
+     Effect.gen(function* () {
+       yield* Effect.sleep("10 millis") // Slow processing
+       yield* Effect.log(`Processed: ${n}`)
+     })
+   ),
+   Stream.runDrain
+ )
+
+ // Producer automatically slows down to match consumer
+
+ // ============================================
+ // 2. Explicit buffer with drop strategy
+ // ============================================
+
+ const bufferedStream = (source: Stream.Stream<number>) =>
+   source.pipe(
+     // Buffer up to 100 items; "dropping" discards new items while full
+     // (use "sliding" to evict the oldest buffered items instead)
+     Stream.buffer({ capacity: 100, strategy: "dropping" })
+   )
+
+ // ============================================
+ // 3. Throttling - limit rate
+ // ============================================
+
+ const throttledStream = (source: Stream.Stream<number>) =>
+   source.pipe(
+     // Process at most 10 items per second
+     // ("shape" delays excess items; "enforce" would drop them)
+     Stream.throttle({
+       cost: () => 1,
+       units: 10,
+       duration: "1 second",
+       strategy: "shape",
+     })
+   )
+
+ // ============================================
+ // 4. Debounce - wait for quiet period
+ // ============================================
+
+ const debouncedStream = (source: Stream.Stream<number>) =>
+   source.pipe(
+     // Wait 100ms of no new items before emitting
+     Stream.debounce("100 millis")
+   )
+
+ // ============================================
+ // 5. Bounded queue for producer-consumer
+ // ============================================
+
+ const boundedQueueExample = Effect.gen(function* () {
+   // Create bounded queue - blocks the producer when full
+   const queue = yield* Queue.bounded<number>(10)
+
+   // Fast producer
+   const producer = Effect.gen(function* () {
+     for (let i = 0; i < 100; i++) {
+       yield* Queue.offer(queue, i)
+       yield* Effect.log(`Produced: ${i}`)
+     }
+     yield* Queue.shutdown(queue)
+   })
+
+   // Slow consumer - Stream.fromQueue ends when the queue is shut down
+   const consumer = Stream.fromQueue(queue).pipe(
+     Stream.tap((item) =>
+       Effect.gen(function* () {
+         yield* Effect.sleep("50 millis") // Slow processing
+         yield* Effect.log(`Consumed: ${item}`)
+       })
+     ),
+     Stream.runFold(0, (count) => count + 1)
+   )
+
+   // Run both - the producer blocks whenever the queue is full
+   yield* Effect.all([producer, consumer], { concurrency: 2 })
+ })
+
+ // ============================================
+ // 6. Sliding window - keep most recent
+ // ============================================
+
+ const slidingWindowStream = (source: Stream.Stream<number>) =>
+   source.pipe(
+     Stream.sliding(5), // Emit a window of the last 5 items
+     Stream.map((window) => ({
+       items: window,
+       average: Chunk.reduce(window, 0, (a, b) => a + b) / Chunk.size(window),
+     }))
+   )
+
+ // ============================================
+ // 7. Run example
+ // ============================================
+
+ const program = Effect.gen(function* () {
+   yield* Effect.log("=== Backpressure Demo ===")
+
+   // Throttled stream
+   const throttled = Stream.fromIterable([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]).pipe(
+     Stream.tap((n) => Effect.log(`Emitting: ${n}`)),
+     Stream.throttle({
+       cost: () => 1,
+       units: 2,
+       duration: "1 second",
+       strategy: "shape",
+     }),
+     Stream.tap((n) => Effect.log(`After throttle: ${n}`)),
+     Stream.runDrain
+   )
+
+   yield* throttled
+ })
+
+ Effect.runPromise(program)
+ ```
+
+ **Rationale:**
+
+ Use Stream's built-in backpressure mechanisms and explicit buffering to handle situations where data producers are faster than consumers.
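+
+ As a compact illustration of the three buffer strategies (the capacity values are illustrative):
+
+ ```typescript
+ import { Stream } from "effect"
+
+ const source = Stream.fromIterable(Array.from({ length: 1000 }, (_, i) => i))
+
+ // "suspend" (the default): the producer pauses while the buffer is full -
+ // classic backpressure
+ const suspending = source.pipe(Stream.buffer({ capacity: 64 }))
+
+ // "dropping": new items are discarded while the buffer is full
+ const dropping = source.pipe(
+   Stream.buffer({ capacity: 64, strategy: "dropping" })
+ )
+
+ // "sliding": the oldest buffered items are evicted to make room for new ones
+ const sliding = source.pipe(
+   Stream.buffer({ capacity: 64, strategy: "sliding" })
+ )
+ ```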
+
+ ---
+
+
+ Backpressure prevents system overload:
+
+ 1. **Memory safety** - Don't buffer unlimited data
+ 2. **Stability** - Slow consumers don't crash the system
+ 3. **Fairness** - Distribute load appropriately
+ 4. **Predictability** - Consistent performance
+
+ ---
+
+ ---
+
+ ### Implement Dead Letter Queues
+
+ **Rule:** Capture failed items with context for debugging and retry instead of losing them.
+
+ **Good Example:**
+
+ ```typescript
+ import { Chunk, Effect, Queue, Ref, Stream } from "effect"
+
+ // ============================================
+ // 1. Define DLQ types
+ // ============================================
+
+ interface DeadLetterItem<T> {
+   readonly item: T
+   readonly error: unknown
+   readonly timestamp: Date
+   readonly attempts: number
+   readonly context: Record<string, unknown>
+ }
+
+ class Success<T, R> {
+   readonly _tag = "Success"
+   constructor(
+     readonly item: T,
+     readonly result: R
+   ) {}
+ }
+
+ class Failure<T> {
+   readonly _tag = "Failure"
+   constructor(
+     readonly item: T,
+     readonly error: unknown,
+     readonly attempts: number
+   ) {}
+ }
+
+ type ProcessingResult<T, R> = Success<T, R> | Failure<T>
+
+ // ============================================
+ // 2. Create a DLQ service
+ // ============================================
+
+ const makeDLQ = <T>() =>
+   Effect.gen(function* () {
+     const queue = yield* Queue.unbounded<DeadLetterItem<T>>()
+     const countRef = yield* Ref.make(0)
+
+     return {
+       send: (
+         item: T,
+         error: unknown,
+         attempts: number,
+         context: Record<string, unknown> = {}
+       ) =>
+         Effect.gen(function* () {
+           yield* Queue.offer(queue, {
+             item,
+             error,
+             timestamp: new Date(),
+             attempts,
+             context,
+           })
+           const total = yield* Ref.updateAndGet(countRef, (n) => n + 1)
+           yield* Effect.log(`DLQ: Added item (total: ${total})`)
+         }),
+
+       getAll: () =>
+         Effect.gen(function* () {
+           const items: DeadLetterItem<T>[] = []
+           while (!(yield* Queue.isEmpty(queue))) {
+             const item = yield* Queue.poll(queue)
+             if (item._tag === "Some") {
+               items.push(item.value)
+             }
+           }
+           return items
+         }),
+
+       count: () => Ref.get(countRef),
+
+       queue,
+     }
+   })
+
+ // ============================================
+ // 3. Process with DLQ
+ // ============================================
+
+ interface Order {
+   id: string
+   amount: number
+ }
+
+ const processOrder = (order: Order): Effect.Effect<string, Error> =>
+   Effect.gen(function* () {
+     // Simulate failures
+     if (order.amount < 0) {
+       return yield* Effect.fail(new Error("Invalid amount"))
+     }
+     if (order.id === "fail") {
+       return yield* Effect.fail(new Error("Processing failed"))
+     }
+     yield* Effect.sleep("10 millis")
+     return `Processed order ${order.id}: $${order.amount}`
+   })
+
+ const processWithRetryAndDLQ = (
+   orders: Stream.Stream<Order>,
+   maxRetries: number = 3
+ ) =>
+   Effect.gen(function* () {
+     const dlq = yield* makeDLQ<Order>()
+
+     const results = yield* orders.pipe(
+       Stream.mapEffect((order) =>
+         Effect.gen(function* () {
+           let lastError: unknown
+           for (let attempt = 1; attempt <= maxRetries; attempt++) {
+             const result: ProcessingResult<Order, string> = yield* processOrder(
+               order
+             ).pipe(
+               Effect.map((r) => new Success(order, r)),
+               Effect.catchAll((error) =>
+                 Effect.gen(function* () {
+                   yield* Effect.log(
+                     `Attempt ${attempt}/${maxRetries} failed for ${order.id}`
+                   )
+                   lastError = error
+                   if (attempt < maxRetries) {
+                     yield* Effect.sleep("100 millis") // Backoff
+                   }
+                   return new Failure(order, error, attempt)
+                 })
+               )
+             )
+
+             if (result._tag === "Success") {
+               return result
+             }
+           }
+
+           // All retries exhausted - send to DLQ
+           yield* dlq.send(order, lastError, maxRetries, { orderId: order.id })
+           return new Failure(order, lastError, maxRetries)
+         })
+       ),
+       Stream.runCollect
+     )
+
+     const successful = Chunk.filter(
+       results,
+       (r): r is Success<Order, string> => r._tag === "Success"
+     )
+     const failed = Chunk.filter(
+       results,
+       (r): r is Failure<Order> => r._tag === "Failure"
+     )
+
+     yield* Effect.log(
+       `\nResults: ${Chunk.size(successful)} success, ${Chunk.size(failed)} failed`
+     )
+
+     // Get DLQ contents
+     const dlqItems = yield* dlq.getAll()
+     if (dlqItems.length > 0) {
+       yield* Effect.log("\n=== Dead Letter Queue Contents ===")
+       for (const item of dlqItems) {
+         yield* Effect.log(
+           `- Order ${item.item.id}: ${item.error} (attempts: ${item.attempts})`
+         )
+       }
+     }
+
+     return { successful, failed, dlqItems }
+   })
+
+ // ============================================
+ // 4. DLQ reprocessing
+ // ============================================
+
+ const reprocessDLQ = <T>(
+   dlqItems: DeadLetterItem<T>[],
+   processor: (item: T) => Effect.Effect<void, Error>
+ ) =>
+   Effect.gen(function* () {
+     yield* Effect.log(`Reprocessing ${dlqItems.length} DLQ items...`)
+
+     for (const dlqItem of dlqItems) {
+       const result = yield* processor(dlqItem.item).pipe(
+         Effect.map(() => "success" as const),
+         Effect.catchAll(() => Effect.succeed("failed" as const))
+       )
+
+       yield* Effect.log(`Reprocess ${JSON.stringify(dlqItem.item)}: ${result}`)
+     }
+   })
+
+ // ============================================
+ // 5. Run example
+ // ============================================
+
+ const program = Effect.gen(function* () {
+   const orders: Order[] = [
+     { id: "1", amount: 100 },
+     { id: "2", amount: 200 },
+     { id: "fail", amount: 50 }, // Will fail all retries
+     { id: "3", amount: 300 },
+     { id: "4", amount: -10 }, // Invalid amount
+     { id: "5", amount: 150 },
+   ]
+
+   yield* Effect.log("=== Processing Orders ===\n")
+   const { dlqItems } = yield* processWithRetryAndDLQ(
+     Stream.fromIterable(orders),
+     3
+   )
+
+   if (dlqItems.length > 0) {
+     yield* Effect.log("\n=== Attempting DLQ Reprocessing ===")
+     yield* reprocessDLQ(dlqItems, (order) =>
+       Effect.gen(function* () {
+         yield* Effect.log(`Manual fix for order ${order.id}`)
+       })
+     )
+   }
+ })
+
+ Effect.runPromise(program)
+ ```
+
+ **Rationale:**
+
+ Route items that fail processing to a dead letter queue (DLQ) with error context, allowing the main pipeline to continue while preserving failed items for investigation.
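+
+ The essential mechanism can be sketched compactly (the names here are illustrative): convert each item's failure into a value with `Effect.either`, route the failures to a queue, and the stream itself never fails:
+
+ ```typescript
+ import { Effect, Either, Queue, Stream } from "effect"
+
+ const program = Effect.gen(function* () {
+   const dlq = yield* Queue.unbounded<{ item: number; error: Error }>()
+
+   const process = (n: number) =>
+     n === 3 ? Effect.fail(new Error("boom")) : Effect.succeed(n * 10)
+
+   yield* Stream.fromIterable([1, 2, 3, 4]).pipe(
+     Stream.mapEffect((n) =>
+       // Effect.either materializes the error channel, so the pipeline
+       // keeps going even when `process` fails
+       Effect.either(process(n)).pipe(
+         Effect.tap((result) =>
+           Either.isLeft(result)
+             ? Queue.offer(dlq, { item: n, error: result.left })
+             : Effect.void
+         )
+       )
+     ),
+     Stream.runDrain
+   )
+
+   const failed = yield* Queue.takeAll(dlq)
+   yield* Effect.log(`DLQ contains ${failed.length} item(s)`)
+ })
+
+ Effect.runPromise(program)
+ ```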
+
+ ---
+
+
+ Dead letter queues provide:
+
+ 1. **Resilience** - Pipeline continues despite failures
+ 2. **Visibility** - See what's failing and why
+ 3. **Recovery** - Reprocess failed items later
+ 4. **Debugging** - Error context for investigation
+
+ ---
+
+ ---
+
+