@soffinal/stream 0.1.4 → 0.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +22 -0
- package/README.md +439 -298
- package/dist/index.d.ts +2 -4
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +2 -2
- package/dist/index.js.map +12 -8
- package/dist/reactive/index.d.ts +5 -0
- package/dist/reactive/index.d.ts.map +1 -0
- package/dist/{list.d.ts → reactive/list.d.ts} +19 -1
- package/dist/reactive/list.d.ts.map +1 -0
- package/dist/{map.d.ts → reactive/map.d.ts} +11 -1
- package/dist/reactive/map.d.ts.map +1 -0
- package/dist/{set.d.ts → reactive/set.d.ts} +11 -1
- package/dist/reactive/set.d.ts.map +1 -0
- package/dist/{state.d.ts → reactive/state.d.ts} +18 -18
- package/dist/reactive/state.d.ts.map +1 -0
- package/dist/stream.d.ts +94 -289
- package/dist/stream.d.ts.map +1 -1
- package/dist/transformers/filter.d.ts +35 -0
- package/dist/transformers/filter.d.ts.map +1 -0
- package/dist/transformers/flat.d.ts +31 -0
- package/dist/transformers/flat.d.ts.map +1 -0
- package/dist/transformers/index.d.ts +5 -0
- package/dist/transformers/index.d.ts.map +1 -0
- package/dist/transformers/map.d.ts +36 -0
- package/dist/transformers/map.d.ts.map +1 -0
- package/dist/transformers/merge.d.ts +35 -0
- package/dist/transformers/merge.d.ts.map +1 -0
- package/package.json +5 -8
- package/src/transformers/filter.md +202 -0
- package/src/transformers/flat.md +56 -0
- package/src/transformers/map.md +216 -0
- package/src/transformers/merge.md +79 -0
- package/dist/benchmark.d.ts +0 -16
- package/dist/benchmark.d.ts.map +0 -1
- package/dist/list.d.ts.map +0 -1
- package/dist/map.d.ts.map +0 -1
- package/dist/set.d.ts.map +0 -1
- package/dist/state.d.ts.map +0 -1
package/dist/transformers/map.d.ts
ADDED
@@ -0,0 +1,36 @@
+import { Stream } from "../stream.ts";
+/**
+ * Adaptive map transformer that transforms values while maintaining state.
+ *
+ * @template VALUE - The type of input values
+ * @template STATE - The type of the internal state object
+ * @template MAPPED - The type of output values after transformation
+ *
+ * @param initialState - Initial state object for the transformer
+ * @param predicate - Function that transforms values and updates state
+ *   - Must return `[transformedValue, newState]`
+ *   - Can be async for complex transformations
+ *   - Preserves order even with async operations
+ *
+ * @returns A transformer function that can be used with `.pipe()`
+ *
+ * @see {@link Stream} - Complete copy-paste transformers library
+ *
+ * @example
+ * // Simple transformation
+ * stream.pipe(map({}, (_, value) => [value * 2, {}]))
+ *
+ * @example
+ * // Async transformation
+ * stream.pipe(
+ *   map({}, async (_, value) => {
+ *     const result = await process(value);
+ *     return [result, {}];
+ *   })
+ * )
+ *
+
+ *
+ */
+export declare function map<VALUE, STATE extends Record<string, unknown>, MAPPED>(initialState: STATE, predicate: (state: STATE, value: VALUE) => [MAPPED, STATE] | Promise<[MAPPED, STATE]>): (stream: Stream<VALUE>) => Stream<MAPPED>;
+//# sourceMappingURL=map.d.ts.map
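As an illustration of the signature declared above (not part of the published file): a stateful transformer built against it might look like the following sketch. The `map` import specifier is an assumption, since only the root `"."` export is visible in this diff, and `push`/`listen`/`pipe` follow the usage shown in the bundled docs.

```typescript
// `map` specifier assumed; only the root "." export is visible in this diff.
import { Stream, map } from "@soffinal/stream";

// Running average: the state carries what has been seen so far,
// and each event returns [transformedValue, newState] per the declaration above.
const numbers = new Stream<number>();

const averages = numbers.pipe(
  map({ sum: 0, count: 0 }, (state, value: number) => {
    const sum = state.sum + value;
    const count = state.count + 1;
    return [sum / count, { sum, count }];
  })
);

averages.listen((avg) => console.log("running average:", avg));
numbers.push(10, 20, 30); // logs 10, 15, 20
```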
package/dist/transformers/map.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"map.d.ts","sourceRoot":"","sources":["../../src/transformers/map.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,MAAM,EAAE,MAAM,cAAc,CAAC;AAEtC;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAgCG;AACH,wBAAgB,GAAG,CAAC,KAAK,EAAE,KAAK,SAAS,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,EAAE,MAAM,EACtE,YAAY,EAAE,KAAK,EACnB,SAAS,EAAE,CAAC,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,KAAK,CAAC,MAAM,EAAE,KAAK,CAAC,GAAG,OAAO,CAAC,CAAC,MAAM,EAAE,KAAK,CAAC,CAAC,GACpF,CAAC,MAAM,EAAE,MAAM,CAAC,KAAK,CAAC,KAAK,MAAM,CAAC,MAAM,CAAC,CAU3C"}
package/dist/transformers/merge.d.ts
ADDED
@@ -0,0 +1,35 @@
+import { Stream } from "../stream.ts";
+type ValueOf<STREAM> = STREAM extends Stream<infer VALUE> ? VALUE : never;
+/**
+ * Merge multiple streams into a single stream with temporal ordering.
+ *
+ * @template VALUE - The type of values from the source stream
+ * @template STREAMS - Tuple type of additional streams to merge
+ *
+ * @param streams - Additional streams to merge with the source stream
+ *
+ * @returns A transformer that merges all streams into one with union types
+ *
+ * @see {@link Stream} - Complete copy-paste transformers library
+ *
+ * @example
+ * // Basic merge with type safety
+ * const numbers = new Stream<number>();
+ * const strings = new Stream<string>();
+ * const merged = numbers.pipe(merge(strings));
+ * // Type: Stream<number | string>
+ *
+ * @example
+ * // Multiple streams
+ * const stream1 = new Stream<number>();
+ * const stream2 = new Stream<string>();
+ * const stream3 = new Stream<boolean>();
+ *
+ * const combined = stream1.pipe(merge(stream2, stream3));
+ * // Type: Stream<number | string | boolean>
+ *
+
+ */
+export declare function merge<VALUE, STREAMS extends [Stream<any>, ...Stream<any>[]]>(...streams: STREAMS): (stream: Stream<VALUE>) => Stream<VALUE | ValueOf<STREAMS[number]>>;
+export {};
+//# sourceMappingURL=merge.d.ts.map
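The `ValueOf` helper above is what turns the variadic stream tuple into a union. A type-level sketch of how it resolves; the aliases `A`, `B`, and `Merged` are illustrative and not part of the package:

```typescript
import { Stream } from "@soffinal/stream";

// The same helper the declaration uses internally, reproduced to show how the
// union in merge's return type is computed (type aliases here are illustrative).
type ValueOf<STREAM> = STREAM extends Stream<infer VALUE> ? VALUE : never;

type A = ValueOf<Stream<number>>; // number
type B = ValueOf<Stream<string> | Stream<boolean>>; // string | boolean (distributes over the union)

// For merge(...streams: STREAMS) applied to a Stream<number> source with
// STREAMS = [Stream<string>, Stream<boolean>], the element type becomes:
type Merged = number | ValueOf<[Stream<string>, Stream<boolean>][number]>; // number | string | boolean
```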
package/dist/transformers/merge.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"merge.d.ts","sourceRoot":"","sources":["../../src/transformers/merge.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,MAAM,EAAE,MAAM,cAAc,CAAC;AAEtC,KAAK,OAAO,CAAC,MAAM,IAAI,MAAM,SAAS,MAAM,CAAC,MAAM,KAAK,CAAC,GAAG,KAAK,GAAG,KAAK,CAAC;AAE1E;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA6BG;AACH,wBAAgB,KAAK,CAAC,KAAK,EAAE,OAAO,SAAS,CAAC,MAAM,CAAC,GAAG,CAAC,EAAE,GAAG,MAAM,CAAC,GAAG,CAAC,EAAE,CAAC,EAC1E,GAAG,OAAO,EAAE,OAAO,GAClB,CAAC,MAAM,EAAE,MAAM,CAAC,KAAK,CAAC,KAAK,MAAM,CAAC,KAAK,GAAG,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,CA0BrE"}
package/package.json
CHANGED
@@ -1,7 +1,7 @@
 {
   "name": "@soffinal/stream",
   "module": "./dist/index.js",
-  "version": "0.1.4",
+  "version": "0.2.1",
   "description": "A reactive event streaming library for TypeScript/JavaScript",
   "type": "module",
   "devDependencies": {
@@ -10,12 +10,6 @@
   "peerDependencies": {
     "typescript": "^5.9.2"
   },
-  "scripts": {
-    "clean": "rm -rf dist",
-    "build": "bun run clean && bun build src/index.ts --outdir dist --target node --format esm --sourcemap --minify && bun run build:types",
-    "build:types": "tsc --emitDeclarationOnly --allowImportingTsExtensions --noEmit false",
-    "release": " git add . && git commit -m \"Release v$(node -p 'require(\"./package.json\").version')\" && git tag v$(node -p 'require(\"./package.json\").version')"
-  },
   "exports": {
     ".": {
       "import": "./dist/index.js",
@@ -29,7 +23,8 @@
     "typescript",
     "event",
     "observable",
-    "emitter"
+    "emitter",
+    "adaptive constraints"
   ],
   "author": "Soffinal <smari.sofiane@gmail.com>",
   "license": "MIT",
@@ -41,7 +36,9 @@
   "types": "./dist/index.d.ts",
   "files": [
     "dist",
+    "src/transformers/*.md",
     "README.md",
+    "CHANGELOG.md",
     "LICENSE"
   ]
 }
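For context, the `exports` entry kept above (only the root `"."` subpath is visible in this hunk) together with the top-level `types` field means consumers import from the package root; a minimal sketch of a consumer module:

```typescript
// Resolves through "exports" -> "." -> "import": "./dist/index.js",
// with type definitions from the top-level "types": "./dist/index.d.ts".
import { Stream } from "@soffinal/stream";

const events = new Stream<string>();
events.listen((e) => console.log(e));
events.push("ready");
```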
package/src/transformers/filter.md
ADDED
@@ -0,0 +1,202 @@
+# Filter Transformer
+
+## The Adaptive Gatekeeper
+
+Traditional filtering is binary and stateless - a value either passes or it doesn't. But real-world filtering often requires **memory, learning, and evolution**. The `filter` transformer embodies **Adaptive Reactive Programming** - the gatekeeper remembers, learns, and can even decide when to stop.
+
+## Design
+
+### Why State-First Architecture?
+
+```typescript
+filter(initialState, (state, value) => [boolean, newState]);
+```
+
+**State comes first** because it's the foundation of adaptation. This isn't just filtering - it's **adaptive gatekeeping** that evolves with each event.
+
+### The Dual Return Pattern
+
+```typescript
+return [shouldPass, newState]; // Continue with evolution
+return; // Terminate with wisdom
+```
+
+**Two outcomes, infinite possibilities:**
+
+- `[boolean, state]` - The filter learns and continues
+- `void` - The filter decides the stream has served its purpose
+
+This mirrors human decision-making: we either let something through (and remember why), or we decide we've seen enough.
+
+### Argument Order
+
+```typescript
+(state, value) => // State first, value second
+```
+
+**State precedes value** because context shapes perception. We don't judge events in isolation - we judge them based on what we've learned. The state is the accumulated experience; the value is just the current moment.
+
+## The Adaptive Constraint System
+
+### Level 1: Simple Gatekeeping
+
+```typescript
+// Traditional filtering - no memory, no learning
+stream.pipe(filter({}, (_, value) => [value > 0, {}]));
+```
+
+Even "simple" filtering uses the adaptive architecture. The empty state `{}` represents a gatekeeper that doesn't need memory - but could develop it.
+
+### Level 2: Memory-Based Filtering
+
+```typescript
+// The gatekeeper remembers and counts
+stream.pipe(
+  filter({ count: 0 }, (state, value) => {
+    const newCount = state.count + 1;
+    return [newCount % 3 === 0, { count: newCount }]; // Every 3rd passes
+  })
+);
+```
+
+### Level 3: Termination
+
+```typescript
+// The gatekeeper knows when enough is enough
+stream.pipe(
+  filter({ seen: 0 }, (state, value) => {
+    if (state.seen >= 10) return; // Wisdom: we've seen enough
+    return [value > 0, { seen: state.seen + 1 }];
+  })
+);
+```
+
+**Stream termination** represents the ultimate adaptive behavior - knowing when to stop. This isn't just filtering; it's **stream lifecycle management**.
+
+### Level 4: Async
+
+```typescript
+// The gatekeeper consults external validation
+stream.pipe(
+  filter({ cache: new Map() }, async (state, value) => {
+    if (state.cache.has(value)) {
+      return [state.cache.get(value), state]; // Remember previous decisions
+    }
+
+    const isValid = await validateAsync(value);
+    state.cache.set(value, isValid); // Learn for next time
+    return [isValid, state];
+  })
+);
+```
+
+**Async filtering with memory** - the gatekeeper doesn't just validate, it **builds institutional knowledge**.
+
+## Essential Copy-Paste Transformers
+
+### simpleFilter - Gateway to Adaptation
+
+```typescript
+// For users transitioning from traditional filtering
+const simpleFilter = <T>(predicate: (value: T) => boolean | Promise<boolean>) =>
+  filter<T, {}>({}, async (_, value) => {
+    const shouldPass = await predicate(value);
+    return [shouldPass, {}];
+  });
+
+// Usage: familiar syntax, adaptive foundation
+stream.pipe(simpleFilter((x) => x > 0));
+stream.pipe(simpleFilter(async (user) => await isValid(user)));
+```
+
+**Design choice**: `simpleFilter` is a **bridge**, not a replacement. It introduces users to the adaptive architecture while providing familiar syntax. The empty state `{}` is an invitation to evolution.
+
+### take - The Counting Gatekeeper
+
+```typescript
+const take = <T>(n: number) =>
+  filter<T, { count: number }>({ count: 0 }, (state, value) => {
+    if (state.count >= n) return; // Wisdom: we have enough
+    return [true, { count: state.count + 1 }];
+  });
+```
+
+### distinct - The Memory Gatekeeper
+
+```typescript
+const distinct = <T>() =>
+  filter<T, { seen: Set<T> }>({ seen: new Set() }, (state, value) => {
+    if (state.seen.has(value)) return [false, state];
+    state.seen.add(value);
+    return [true, state];
+  });
+```
+
+### tap - The Observer Gatekeeper
+
+```typescript
+const tap = <T>(fn: (value: T) => void | Promise<void>) =>
+  filter<T, {}>({}, async (_, value) => {
+    await fn(value);
+    return [true, {}]; // Always pass through
+  });
+
+// Usage: side effects without changing the stream
+stream.pipe(tap((value) => console.log("Saw:", value)));
+stream.pipe(tap(async (value) => await logToDatabase(value)));
+```
+
+## The Termination
+
+Stream termination isn't failure - it's **purposeful completion**. When a filter returns `void`, it's saying: "I have served my purpose, and this stream's journey ends here."
+
+```typescript
+// A filter that knows its mission
+const untilCondition = <T>(condition: (value: T) => boolean) =>
+  filter<T, {}>({}, (_, value) => {
+    if (condition(value)) return; // Mission complete
+    return [true, {}];
+  });
+```
+
+This represents a fundamental shift from infinite streams to **purpose-driven streams** that know when their work is done.
+
+## Enhanced Pipe Integration
+
+The new pipe architecture enables seamless integration:
+
+```typescript
+// Filter integrates with any transformer
+const result = stream
+  .pipe(filter({}, (_, v) => [v > 0, {}])) // Returns Stream<T>
+  .pipe(map({}, (_, v) => [v.toString(), {}])) // Returns Stream<string>
+  .pipe(toState("0")); // Returns State<string>
+
+// Complex filtering chains
+const processed = source
+  .pipe(
+    filter({ seen: 0 }, (state, v) => {
+      if (state.seen >= 100) return; // Terminate after 100 values
+      return [v > 0, { seen: state.seen + 1 }];
+    })
+  )
+  .pipe(tap((v) => console.log("Positive:", v)))
+  .pipe(
+    filter({ count: 0 }, (state, v) => {
+      return [state.count % 2 === 0, { count: state.count + 1 }]; // Every other value
+    })
+  );
+```
+
+**Note**: Filters compose naturally because they all speak the same language - **adaptive constraints** that can terminate, remember, and evolve.
+
+**Design insight**: Filtering a State creates **conditional reactivity** - the derived state only reacts to values that pass the adaptive constraints.
+
+## Conclusion
+
+The `filter` transformer isn't just about removing unwanted values - it's about **intelligent gatekeeping** that:
+
+- **Remembers** previous decisions (state)
+- **Learns** from patterns (adaptation)
+- **Evolves** behavior over time (constraints)
+- **Knows** when to stop (termination)
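To make the closing note about conditional reactivity concrete, here is a hedged sketch that combines the documented adaptive filter with `toState`; the import specifiers and the `toState` signature are assumptions based only on the `.pipe(toState("0"))` usage shown above.

```typescript
// `filter` and `toState` specifiers are assumed; only the root "." export is visible in this diff.
import { Stream, filter, toState } from "@soffinal/stream";

const readings = new Stream<number>();

// Conditional reactivity: the derived state only updates for plausible readings,
// and the filter terminates itself after 1000 events (purposeful completion).
const lastValidReading = readings
  .pipe(
    filter({ seen: 0 }, (state, value: number) => {
      if (state.seen >= 1000) return; // we have seen enough
      return [value > -50 && value < 60, { seen: state.seen + 1 }];
    })
  )
  .pipe(toState(20)); // assumed: State<number> seeded with an initial value
```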
package/src/transformers/flat.md
ADDED
@@ -0,0 +1,56 @@
+# Flat Transformer
+
+## Event Multiplication
+
+The `flat` transformer converts array events into individual events - essentially `Array.prototype.flat()` for streams.
+
+**Core Concept**: 1 array event → N individual events
+
+## Usage
+
+```typescript
+stream.pipe(flat(depth?));
+```
+
+- **Input**: `Stream<T[]>`
+- **Output**: `Stream<T>`
+- **Transformation**: Each array becomes separate events
+
+## Basic Example
+
+```typescript
+const arrayStream = new Stream<number[]>();
+const flattened = arrayStream.pipe(flat());
+
+flattened.listen((value) => console.log(value));
+
+arrayStream.push([1, 2, 3]);
+// Emits: 1, 2, 3 as separate events
+// Logs: 1, 2, 3
+```
+
+## Depth Control
+
+```typescript
+const nested = new Stream<number[][]>();
+const flattened = nested.pipe(flat(1)); // Flatten 2 levels
+
+nested.push([
+  [1, 2],
+  [3, 4],
+]);
+// Emits: 1, 2, 3, 4 as separate events
+```
+
+## Common Pattern
+
+```typescript
+// Map then flatten
+const sentences = new Stream<string>();
+const characters = sentences.pipe(map({}, (_, s) => [s.split(""), {}])).pipe(flat());
+
+sentences.push("hello");
+// Emits: 'h', 'e', 'l', 'l', 'o' as separate events
+```
+
+That's it. Simple event multiplication for arrays.
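Following the copy-paste style of the other transformer docs, the map-then-flatten pattern above can be wrapped in a small helper. A sketch, assuming `map` and `flat` compose as documented and are importable from the package root:

```typescript
// `map` and `flat` specifiers assumed; only the root "." export is visible in this diff.
import { Stream, map, flat } from "@soffinal/stream";

// flatMap: turn each event into an array, then emit the items as individual events,
// following the "Map then flatten" pattern documented above.
const flatMap =
  <T, U>(fn: (value: T) => U[]) =>
  (stream: Stream<T>) =>
    stream.pipe(map({}, (_, value: T) => [fn(value), {}])).pipe(flat());

// Usage: sentences to characters
const sentences = new Stream<string>();
const characters = sentences.pipe(flatMap((s) => s.split("")));

characters.listen((c) => console.log(c));
sentences.push("hi"); // logs "h", then "i"
```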
package/src/transformers/map.md
ADDED
@@ -0,0 +1,216 @@
+# Map Transformer
+
+## The Adaptive Alchemist
+
+Traditional mapping is a simple function application - transform input A to output B. But real-world transformation often requires **context, memory, and evolution**. The `map` transformer embodies **Adaptive Reactive Programming** - the alchemist remembers previous transformations and evolves its craft.
+
+## Design
+
+### Why State-First Architecture?
+
+```typescript
+map(initialState, (state, value) => [transformed, newState]);
+```
+
+**State comes first** because transformation is contextual. We don't transform values in isolation - we transform them based on what we've learned, what we've seen, and where we're going. The state is the accumulated knowledge; the value is just the raw material.
+
+### The Dual Return Pattern
+
+```typescript
+return [transformedValue, newState]; // Transform and evolve
+```
+
+**Every transformation teaches us something.** The dual return forces us to consider: "How does this transformation change our understanding?" Even if the state doesn't change, we must consciously decide that.
+
+### Argument Order
+
+```typescript
+(state, value) => // Context first, content second
+```
+
+**Context shapes transformation.** A value of `5` might become `10` (double), `"5"` (stringify), or `{ count: 5, timestamp: now }` (enrich) - depending on the accumulated state. The transformer's history determines the value's destiny.
+
+## The Adaptive Transformation System
+
+### Level 1: Simple Alchemy
+
+```typescript
+// Traditional transformation - no memory, no learning
+stream.pipe(map({}, (_, value) => [value * 2, {}]));
+```
+
+Even "simple" mapping uses the adaptive architecture. The empty state `{}` represents a transformer that doesn't need memory - but could develop it.
+
+### Level 2: Contextual Transformation
+
+```typescript
+// The alchemist remembers and enriches
+stream.pipe(
+  map({ sum: 0, count: 0 }, (state, value) => {
+    const newSum = state.sum + value;
+    const newCount = state.count + 1;
+    const average = newSum / newCount;
+
+    return [
+      { value, runningSum: newSum, runningAverage: average },
+      { sum: newSum, count: newCount },
+    ];
+  })
+);
+```
+
+### Level 3: Evolutionary Transformation
+
+```typescript
+// The alchemist adapts its formula based on patterns
+stream.pipe(
+  map({ multiplier: 1, trend: "stable" }, (state, value) => {
+    // Adapt the transformation based on observed patterns
+    const newMultiplier = value > 100 ? state.multiplier * 1.1 : state.multiplier * 0.9;
+    const trend = newMultiplier > state.multiplier ? "growing" : "shrinking";
+
+    return [value * newMultiplier, { multiplier: newMultiplier, trend }];
+  })
+);
+```
+
+**Adaptive transformation** - the formula itself evolves based on the data it processes.
+
+### Level 4: Async with Order Preservation
+
+```typescript
+// The alchemist consults external sources while maintaining order
+stream.pipe(
+  map({ cache: new Map() }, async (state, value) => {
+    if (state.cache.has(value)) {
+      return [state.cache.get(value), state];
+    }
+
+    const enriched = await enrichWithAPI(value);
+    state.cache.set(value, enriched); // Learn for next time
+    return [enriched, state];
+  })
+);
+```
+
+**Async transformation with memory** - the alchemist doesn't just transform, it **builds institutional knowledge** while preserving the natural order of events.
+
+## Essential Copy-Paste Transformers
+
+### simpleMap - Gateway to Adaptation
+
+```typescript
+// For users transitioning from traditional mapping
+const simpleMap = <T, U>(fn: (value: T) => U | Promise<U>) =>
+  map<T, {}, U>({}, async (_, value) => {
+    const result = await fn(value);
+    return [result, {}];
+  });
+
+// Usage: familiar syntax, adaptive foundation
+stream.pipe(simpleMap((x) => x * 2));
+stream.pipe(simpleMap(async (user) => await enrichUser(user)));
+```
+
+**Design choice**: `simpleMap` is a **bridge**, not a replacement. It introduces users to the adaptive architecture while providing familiar syntax. The empty state `{}` is an invitation to evolution.
+
+### withIndex - The Counting Alchemist
+
+```typescript
+const withIndex = <T>() =>
+  map<T, { index: number }, { value: T; index: number }>({ index: 0 }, (state, value) => [
+    { value, index: state.index },
+    { index: state.index + 1 },
+  ]);
+```
+
+`withIndex` demonstrates **sequential awareness** - the transformer knows its place in the stream's history and shares that knowledge.
+
+### delay - The Patient Alchemist
+
+```typescript
+const delay = <T>(ms: number) =>
+  map<T, {}, T>({}, async (_, value) => {
+    await new Promise((resolve) => setTimeout(resolve, ms));
+    return [value, {}];
+  });
+```
+
+`delay` embodies **temporal transformation** - sometimes the most important transformation is time itself.
+
+### pluck - The Focused Alchemist
+
+```typescript
+const pluck = <T, K extends keyof T>(key: K) => map<T, {}, T[K]>({}, (_, value) => [value[key], {}]);
+```
+
+`pluck` demonstrates **selective transformation** - the alchemist knows exactly what it wants and ignores everything else.
+
+### tap - The Observer Alchemist
+
+```typescript
+const tap = <T>(fn: (value: T) => void | Promise<void>) =>
+  map<T, {}, T>({}, async (_, value) => {
+    await fn(value);
+    return [value, {}]; // Always pass through
+  });
+
+// Usage: side effects without changing the stream
+stream.pipe(tap((value) => console.log("Saw:", value)));
+stream.pipe(tap(async (value) => await logToDatabase(value)));
+```
+
+### scan - The Accumulating Alchemist
+
+```typescript
+const scan = <T, U>(fn: (acc: U, value: T) => U, initial: U) =>
+  map<T, { acc: U }, U>({ acc: initial }, (state, value) => {
+    const newAcc = fn(state.acc, value);
+    return [newAcc, { acc: newAcc }];
+  });
+
+// Usage: accumulate values over time
+stream.pipe(scan((sum, value) => sum + value, 0)); // Running sum
+stream.pipe(scan((max, value) => Math.max(max, value), -Infinity)); // Running max
+```
+
+`scan` demonstrates **accumulative transformation** - each value builds upon all previous values, creating a growing understanding.
+
+## Order Preservation
+
+Async transformations maintain order because **sequence matters**. Even if transformation B completes before transformation A, the stream waits and emits A's result first. This isn't just about correctness - it's about **respecting the narrative** of the data.
+
+```typescript
+// Order is preserved even with varying async delays
+stream.pipe(
+  map({}, async (_, value) => {
+    const delay = Math.random() * 1000; // Random processing time
+    await new Promise((resolve) => setTimeout(resolve, delay));
+    return [await processValue(value), {}];
+  })
+);
+```
+
+**Philosophy**: The stream is a story, and stories must be told in order.
+
+## The State Evolution Pattern
+
+State evolution follows a natural progression:
+
+1. **Empty State** `{}` - The transformer starts innocent
+2. **Simple State** `{ count: 0 }` - It learns to count
+3. **Rich State** `{ sum: 0, count: 0, average: 0 }` - It develops complex understanding
+4. **Intelligent State** `{ cache: Map, patterns: [], predictions: {} }` - It becomes wise
+
+This mirrors how expertise develops in any field - from simple rules to nuanced understanding.
+
+## Conclusion
+
+The `map` transformer isn't just about changing values - it's about **intelligent transformation** that:
+
+- **Remembers** previous transformations (state)
+- **Learns** from patterns (adaptation)
+- **Evolves** its approach over time (constraints)
+- **Preserves** the narrative order (respect)
+- **Integrates** with reactive state (toState)
+- **Accumulates** knowledge over time (scan)
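A small sketch of the order-preservation claim above, using only the API surface shown in these docs; the `map` import specifier is assumed, and the ordering guarantee is the library's documented behaviour, not something verified here.

```typescript
// `map` specifier assumed; only the root "." export is visible in this diff.
import { Stream, map } from "@soffinal/stream";

const ids = new Stream<number>();

// Each event takes a random amount of time to process, yet results are still
// delivered in push order if ordering is preserved as the docs state.
const labels = ids.pipe(
  map({}, async (_, id: number) => {
    await new Promise((resolve) => setTimeout(resolve, Math.random() * 100));
    return [`item-${id}`, {}];
  })
);

const seen: string[] = [];
labels.listen((label) => seen.push(label));
ids.push(1, 2, 3);
// Expected per the docs: seen eventually equals ["item-1", "item-2", "item-3"]
```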
package/src/transformers/merge.md
ADDED
@@ -0,0 +1,79 @@
+# Merge Transformer
+
+## Temporal Orchestration
+
+The `merge` transformer combines multiple streams into one unified flow while preserving temporal order - like conducting multiple instruments into a single symphony.
+
+**Core Concept**: Multiple streams → One unified stream with union types
+
+## Usage
+
+```typescript
+stream.pipe(merge(...otherStreams));
+```
+
+- **Input**: `Stream<T>` + `Stream<U>[]`
+- **Output**: `Stream<T | U>`
+- **Order**: Maintains chronological sequence across all streams
+
+## Basic Example
+
+```typescript
+const numbers = new Stream<number>();
+const strings = new Stream<string>();
+
+const combined = numbers.pipe(merge(strings));
+// Type: Stream<number | string>
+
+combined.listen((value) => {
+  if (typeof value === "number") {
+    console.log("Number:", value);
+  } else {
+    console.log("String:", value);
+  }
+});
+
+numbers.push(1, 2);
+strings.push("a", "b");
+// Output: 1, 2, "a", "b" (in temporal order)
+```
+
+## Multiple Streams
+
+```typescript
+const events = new Stream<Event>();
+const errors = new Stream<Error>();
+const logs = new Stream<LogEntry>();
+
+const allActivity = events.pipe(merge(errors, logs));
+// Type: Stream<Event | Error | LogEntry>
+
+// All streams flow into one unified timeline
+```
+
+## Type Safety
+
+```typescript
+interface UserEvent {
+  type: "user";
+  data: any;
+}
+interface SystemEvent {
+  type: "system";
+  data: any;
+}
+
+const userEvents = new Stream<UserEvent>();
+const systemEvents = new Stream<SystemEvent>();
+
+const allEvents = userEvents.pipe(merge(systemEvents));
+
+allEvents.listen((event) => {
+  switch (event.type) {
+    case "user": // handle user event
+      break;
+    case "system": // handle system event
+      break;
+  }
+});
+```
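A common follow-on to the Type Safety example: when the source streams do not share a discriminant, tag each one with `map` before merging so the union can be narrowed. A sketch, with import specifiers assumed:

```typescript
// `map` and `merge` specifiers assumed; only the root "." export is visible in this diff.
import { Stream, map, merge } from "@soffinal/stream";

const clicks = new Stream<{ x: number; y: number }>();
const keys = new Stream<string>();

// Tag each source before merging so the union can be narrowed by a discriminant.
const taggedClicks = clicks.pipe(
  map({}, (_, c: { x: number; y: number }) => [{ kind: "click" as const, c }, {}])
);
const taggedKeys = keys.pipe(map({}, (_, k: string) => [{ kind: "key" as const, k }, {}]));

const input = taggedClicks.pipe(merge(taggedKeys));

input.listen((e) => {
  if (e.kind === "click") console.log("click at", e.c.x, e.c.y);
  else console.log("key:", e.k);
});
```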
package/dist/benchmark.d.ts
DELETED
@@ -1,16 +0,0 @@
-interface BenchmarkResult {
-    name: string;
-    operations: number;
-    duration: number;
-    opsPerSecond: number;
-    memoryUsage?: number;
-}
-declare class Benchmark {
-    private results;
-    run(name: string, operations: number, fn: () => Promise<void> | void): Promise<BenchmarkResult>;
-    getResults(): BenchmarkResult[];
-    printSummary(): void;
-}
-declare function runBenchmarks(): Promise<void>;
-export { runBenchmarks, Benchmark };
-//# sourceMappingURL=benchmark.d.ts.map
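For reference, the deleted declarations describe roughly this usage shape; the sketch below is reconstructed only from the signatures above, and the import path is illustrative since the benchmark module is no longer shipped.

```typescript
// Sketch based solely on the deleted declarations; "./benchmark" is an illustrative path.
import { Benchmark, runBenchmarks } from "./benchmark";

const bench = new Benchmark();
await bench.run("push 1k events", 1_000, () => {
  // ...operation under test
});
console.log(bench.getResults()); // BenchmarkResult[]
bench.printSummary();

// Or run the whole built-in suite:
await runBenchmarks();
```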
package/dist/benchmark.d.ts.map
DELETED
@@ -1 +0,0 @@
-{"version":3,"file":"benchmark.d.ts","sourceRoot":"","sources":["../src/benchmark.ts"],"names":[],"mappings":"AAEA,UAAU,eAAe;IACvB,IAAI,EAAE,MAAM,CAAC;IACb,UAAU,EAAE,MAAM,CAAC;IACnB,QAAQ,EAAE,MAAM,CAAC;IACjB,YAAY,EAAE,MAAM,CAAC;IACrB,WAAW,CAAC,EAAE,MAAM,CAAC;CACtB;AAED,cAAM,SAAS;IACb,OAAO,CAAC,OAAO,CAAyB;IAElC,GAAG,CAAC,IAAI,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM,EAAE,EAAE,EAAE,MAAM,OAAO,CAAC,IAAI,CAAC,GAAG,IAAI,GAAG,OAAO,CAAC,eAAe,CAAC;IAgCrG,UAAU,IAAI,eAAe,EAAE;IAI/B,YAAY;CAMb;AAED,iBAAe,aAAa,kBAwI3B;AAOD,OAAO,EAAE,aAAa,EAAE,SAAS,EAAE,CAAC"}