@effect-uai/core 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +43 -0
- package/dist/AiError-CqmYjXyx.d.mts +110 -0
- package/dist/AiError-CqmYjXyx.d.mts.map +1 -0
- package/dist/Items-D1C2686t.d.mts +372 -0
- package/dist/Items-D1C2686t.d.mts.map +1 -0
- package/dist/Loop-CzSJo1h8.d.mts +87 -0
- package/dist/Loop-CzSJo1h8.d.mts.map +1 -0
- package/dist/Outcome-C2JYknCu.d.mts +40 -0
- package/dist/Outcome-C2JYknCu.d.mts.map +1 -0
- package/dist/StructuredFormat-B5ueioNr.d.mts +88 -0
- package/dist/StructuredFormat-B5ueioNr.d.mts.map +1 -0
- package/dist/Tool-5wxOCuOh.d.mts +86 -0
- package/dist/Tool-5wxOCuOh.d.mts.map +1 -0
- package/dist/ToolEvent-B2N10hr3.d.mts +29 -0
- package/dist/ToolEvent-B2N10hr3.d.mts.map +1 -0
- package/dist/Turn-rlTfuHaQ.d.mts +211 -0
- package/dist/Turn-rlTfuHaQ.d.mts.map +1 -0
- package/dist/chunk-CfYAbeIz.mjs +13 -0
- package/dist/domain/AiError.d.mts +2 -0
- package/dist/domain/AiError.mjs +40 -0
- package/dist/domain/AiError.mjs.map +1 -0
- package/dist/domain/Items.d.mts +2 -0
- package/dist/domain/Items.mjs +238 -0
- package/dist/domain/Items.mjs.map +1 -0
- package/dist/domain/Turn.d.mts +2 -0
- package/dist/domain/Turn.mjs +82 -0
- package/dist/domain/Turn.mjs.map +1 -0
- package/dist/index.d.mts +14 -0
- package/dist/index.mjs +14 -0
- package/dist/language-model/LanguageModel.d.mts +60 -0
- package/dist/language-model/LanguageModel.d.mts.map +1 -0
- package/dist/language-model/LanguageModel.mjs +33 -0
- package/dist/language-model/LanguageModel.mjs.map +1 -0
- package/dist/loop/Loop.d.mts +2 -0
- package/dist/loop/Loop.mjs +172 -0
- package/dist/loop/Loop.mjs.map +1 -0
- package/dist/match/Match.d.mts +16 -0
- package/dist/match/Match.d.mts.map +1 -0
- package/dist/match/Match.mjs +15 -0
- package/dist/match/Match.mjs.map +1 -0
- package/dist/observability/Metrics.d.mts +45 -0
- package/dist/observability/Metrics.d.mts.map +1 -0
- package/dist/observability/Metrics.mjs +52 -0
- package/dist/observability/Metrics.mjs.map +1 -0
- package/dist/streaming/JSONL.d.mts +34 -0
- package/dist/streaming/JSONL.d.mts.map +1 -0
- package/dist/streaming/JSONL.mjs +51 -0
- package/dist/streaming/JSONL.mjs.map +1 -0
- package/dist/streaming/Lines.d.mts +27 -0
- package/dist/streaming/Lines.d.mts.map +1 -0
- package/dist/streaming/Lines.mjs +32 -0
- package/dist/streaming/Lines.mjs.map +1 -0
- package/dist/streaming/SSE.d.mts +31 -0
- package/dist/streaming/SSE.d.mts.map +1 -0
- package/dist/streaming/SSE.mjs +58 -0
- package/dist/streaming/SSE.mjs.map +1 -0
- package/dist/structured-format/StructuredFormat.d.mts +2 -0
- package/dist/structured-format/StructuredFormat.mjs +68 -0
- package/dist/structured-format/StructuredFormat.mjs.map +1 -0
- package/dist/testing/MockProvider.d.mts +48 -0
- package/dist/testing/MockProvider.d.mts.map +1 -0
- package/dist/testing/MockProvider.mjs +95 -0
- package/dist/testing/MockProvider.mjs.map +1 -0
- package/dist/tool/HistoryCheck.d.mts +24 -0
- package/dist/tool/HistoryCheck.d.mts.map +1 -0
- package/dist/tool/HistoryCheck.mjs +39 -0
- package/dist/tool/HistoryCheck.mjs.map +1 -0
- package/dist/tool/Outcome.d.mts +2 -0
- package/dist/tool/Outcome.mjs +45 -0
- package/dist/tool/Outcome.mjs.map +1 -0
- package/dist/tool/Resolvers.d.mts +44 -0
- package/dist/tool/Resolvers.d.mts.map +1 -0
- package/dist/tool/Resolvers.mjs +67 -0
- package/dist/tool/Resolvers.mjs.map +1 -0
- package/dist/tool/Tool.d.mts +2 -0
- package/dist/tool/Tool.mjs +79 -0
- package/dist/tool/Tool.mjs.map +1 -0
- package/dist/tool/ToolEvent.d.mts +2 -0
- package/dist/tool/ToolEvent.mjs +8 -0
- package/dist/tool/ToolEvent.mjs.map +1 -0
- package/dist/tool/Toolkit.d.mts +34 -0
- package/dist/tool/Toolkit.d.mts.map +1 -0
- package/dist/tool/Toolkit.mjs +105 -0
- package/dist/tool/Toolkit.mjs.map +1 -0
- package/package.json +127 -0
- package/src/domain/AiError.ts +93 -0
- package/src/domain/Items.ts +260 -0
- package/src/domain/Turn.ts +174 -0
- package/src/index.ts +13 -0
- package/src/language-model/LanguageModel.ts +73 -0
- package/src/loop/Loop.test.ts +412 -0
- package/src/loop/Loop.ts +295 -0
- package/src/match/Match.ts +9 -0
- package/src/observability/Metrics.ts +87 -0
- package/src/streaming/JSONL.test.ts +85 -0
- package/src/streaming/JSONL.ts +96 -0
- package/src/streaming/Lines.ts +34 -0
- package/src/streaming/SSE.test.ts +72 -0
- package/src/streaming/SSE.ts +114 -0
- package/src/structured-format/StructuredFormat.ts +160 -0
- package/src/testing/MockProvider.ts +161 -0
- package/src/tool/HistoryCheck.ts +49 -0
- package/src/tool/Outcome.ts +101 -0
- package/src/tool/Resolvers.test.ts +426 -0
- package/src/tool/Resolvers.ts +166 -0
- package/src/tool/Tool.ts +150 -0
- package/src/tool/ToolEvent.ts +37 -0
- package/src/tool/Toolkit.test.ts +45 -0
- package/src/tool/Toolkit.ts +228 -0
package/src/loop/Loop.ts
ADDED
|
@@ -0,0 +1,295 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Pull-based `loop` for state-threaded sub-streams.
|
|
3
|
+
*
|
|
4
|
+
* Each iteration runs a body that returns a `Stream<Event<A, S>>`. The body
|
|
5
|
+
* emits values via `Loop.value(a)` and signals iteration control via
|
|
6
|
+
* `Loop.next(state)` (continue with new state) or `Loop.stop` (terminate).
|
|
7
|
+
* The loop unwraps `Value` events back to `A` for downstream consumers, so
|
|
8
|
+
* the resulting stream is a plain `Stream<A>`.
|
|
9
|
+
*
|
|
10
|
+
* The next body stream is only pulled when downstream pulls the outer
|
|
11
|
+
* stream - no producer fiber, no queue buffering. Cancellation, failures,
|
|
12
|
+
* scoped resources, and backpressure stay aligned with normal Stream
|
|
13
|
+
* semantics.
|
|
14
|
+
*
|
|
15
|
+
* Convention: a `Next` or `Stop` event is the terminal element of a body's
|
|
16
|
+
* iteration. Values emitted in the same chunk after one are discarded
|
|
17
|
+
* (their producing side effects may already have run). Prefer the
|
|
18
|
+
* `Loop.nextAfter` / `Loop.stopAfter` helpers to terminate cleanly.
|
|
19
|
+
*/
|
|
20
|
+
import { Cause, Channel, Data, Effect, Exit, Function, Option, Ref, Scope, Stream } from "effect"
|
|
21
|
+
import { IncompleteTurn } from "../domain/AiError.js"
|
|
22
|
+
import { isTurnComplete, type Turn, type TurnEvent } from "../domain/Turn.js"
|
|
23
|
+
|
|
24
|
+
// ---------------------------------------------------------------------------
|
|
25
|
+
// Event type - the body's emit shape
|
|
26
|
+
// ---------------------------------------------------------------------------
|
|
27
|
+
|
|
28
|
+
/**
 * The tagged union a body emits per pull. `Value` carries a payload that
 * flows downstream. `Next` ends the current iteration and continues with a
 * new state. `Stop` ends the loop entirely.
 */
export type Event<A, S> = Data.TaggedEnum<{
  Value: { readonly value: A }
  Next: { readonly state: S }
  Stop: {}
}>

// Two-generic definition record so `Data.taggedEnum` can build polymorphic
// constructors: `A` is the emitted value type, `B` is the loop state type.
interface EventDef extends Data.TaggedEnum.WithGenerics<2> {
  readonly taggedEnum: Event<this["A"], this["B"]>
}

// Internal constructors; users go through `value` / `next` / `stopEvent`.
const Event = Data.taggedEnum<EventDef>()
|
|
44
|
+
|
|
45
|
+
/** Wrap a value so it flows through the loop to downstream consumers. */
export const value = <A>(a: A): Event<A, never> => Event.Value({ value: a })

/** End the current iteration and continue with a new state. */
export const next = <S>(state: S): Event<never, S> => Event.Next({ state })

/** The terminal `Stop` event. Use `stop` (the Stream) to end a loop body. */
export const stopEvent: Event<never, never> = Event.Stop()

/**
 * A single-element stream that ends the loop. Return this from a body when
 * there's nothing else to emit; equivalent to `stopAfter(Stream.empty)` but
 * named for the common case.
 */
export const stop: Stream.Stream<Event<never, never>> = Stream.succeed(stopEvent)
|
|
60
|
+
|
|
61
|
+
/**
|
|
62
|
+
* Pipe a raw `Stream<A>` into the loop's emit shape, then terminate the
|
|
63
|
+
* iteration with `next(state)`. Common shape for "stream this turn's
|
|
64
|
+
* deltas, then continue with updated history."
|
|
65
|
+
*/
|
|
66
|
+
export const nextAfter = <S, A, E, R>(
|
|
67
|
+
stream: Stream.Stream<A, E, R>,
|
|
68
|
+
state: S,
|
|
69
|
+
): Stream.Stream<Event<A, S>, E, R> =>
|
|
70
|
+
Stream.concat(Stream.map(stream, value), Stream.fromIterable([next(state)]))
|
|
71
|
+
|
|
72
|
+
/**
|
|
73
|
+
* Pipe a raw `Stream<A>` into the loop's emit shape, then terminate the
|
|
74
|
+
* loop. Common shape for "stream this turn's deltas, then we're done."
|
|
75
|
+
*/
|
|
76
|
+
export const stopAfter = <A, E, R>(
|
|
77
|
+
stream: Stream.Stream<A, E, R>,
|
|
78
|
+
): Stream.Stream<Event<A, never>, E, R> =>
|
|
79
|
+
Stream.concat(Stream.map(stream, value), Stream.fromIterable([stopEvent]))
|
|
80
|
+
|
|
81
|
+
/**
|
|
82
|
+
* General `nextAfter` variant: drain `stream` to the consumer, fold elements
|
|
83
|
+
* into an accumulator, and at end-of-stream emit one `next(build(finalAcc))`.
|
|
84
|
+
*
|
|
85
|
+
* Subsumes `nextAfter` when state is constant (`reduce: (s, _) => s`,
|
|
86
|
+
* `build: (s) => s`). Used by `Toolkit.nextStateFrom` to collect tool
|
|
87
|
+
* results and build next state without exposing a Ref to recipes.
|
|
88
|
+
*/
|
|
89
|
+
export const nextAfterFold = <A, B, S, E, R>(
|
|
90
|
+
stream: Stream.Stream<A, E, R>,
|
|
91
|
+
initial: B,
|
|
92
|
+
reduce: (acc: B, a: A) => B,
|
|
93
|
+
build: (b: B) => S,
|
|
94
|
+
): Stream.Stream<Event<A, S>, E, R> =>
|
|
95
|
+
Stream.unwrap(
|
|
96
|
+
Effect.gen(function* () {
|
|
97
|
+
const ref = yield* Ref.make(initial)
|
|
98
|
+
const tapped = stream.pipe(
|
|
99
|
+
Stream.tap((a) => Ref.update(ref, (acc) => reduce(acc, a))),
|
|
100
|
+
Stream.map(value),
|
|
101
|
+
)
|
|
102
|
+
const continuation = Stream.fromEffect(
|
|
103
|
+
Ref.get(ref).pipe(Effect.map((acc) => next(build(acc)))),
|
|
104
|
+
)
|
|
105
|
+
return tapped.pipe(Stream.concat(continuation))
|
|
106
|
+
}),
|
|
107
|
+
)
|
|
108
|
+
|
|
109
|
+
// ---------------------------------------------------------------------------
|
|
110
|
+
// streamUntilComplete - turn-aware stream operator for loop bodies
|
|
111
|
+
// ---------------------------------------------------------------------------
|
|
112
|
+
|
|
113
|
+
/**
 * Lift a provider's `Stream<TurnEvent>` into a loop body's `Stream<Event<TurnEvent | A, S>>`.
 * Each delta passes through as `value(delta)` (including the terminal
 * `turn_complete`, so the consumer sees turn boundaries naturally). Once
 * the terminal arrives, `then(turn)` runs and its returned stream of loop
 * events (typically tool outputs followed by `next(state)` or `stop`) is
 * concatenated.
 *
 * Pre-pipe transforms (`Stream.tap` / `Stream.map` / `Stream.filter`) on
 * the raw delta stream cover anything an `emit`-style callback would do.
 *
 * If the upstream ends without a `turn_complete`, the resulting stream
 * fails with `AiError.IncompleteTurn`. Catch it via `Stream.catchTag` if
 * you want to recover.
 */
export const streamUntilComplete =
  <S, A, E2 = never, R2 = never>(
    then: (turn: Turn) => Effect.Effect<Stream.Stream<Event<A, S>, E2, R2>, E2, R2>,
  ) =>
  <E, R>(
    deltas: Stream.Stream<TurnEvent, E, R>,
  ): Stream.Stream<Event<TurnEvent | A, S>, E | E2 | IncompleteTurn, R | R2> =>
    Stream.unwrap(
      Effect.gen(function* () {
        // Captures the completed Turn as the terminal event passes through.
        const turnRef = yield* Ref.make<Option.Option<Turn>>(Option.none())

        // Pass every delta downstream as a `Value`, snapshotting the turn
        // into the Ref when the terminal `turn_complete` goes by.
        const events: Stream.Stream<Event<TurnEvent, S>, E, R> = deltas.pipe(
          Stream.tap((delta) =>
            isTurnComplete(delta) ? Ref.set(turnRef, Option.some(delta.turn)) : Effect.void,
          ),
          Stream.map(value),
        )

        // Evaluated only after `events` is exhausted (Stream.concat), so the
        // Ref read observes whatever the tap recorded.
        const continuation = Stream.unwrap(
          Effect.gen(function* () {
            const opt = yield* Ref.get(turnRef)
            // Upstream ended without `turn_complete`: surface IncompleteTurn.
            if (Option.isNone(opt)) return yield* Effect.fail(new IncompleteTurn({}))
            return yield* then(opt.value)
          }),
        )

        return Stream.concat(events, continuation)
      }),
    )
|
|
157
|
+
|
|
158
|
+
// ---------------------------------------------------------------------------
|
|
159
|
+
// Internal helpers
|
|
160
|
+
// ---------------------------------------------------------------------------
|
|
161
|
+
|
|
162
|
+
// Non-empty tuple guard; decides whether a pull produced emittable values.
const isNonEmpty = <A>(array: ReadonlyArray<A>): array is readonly [A, ...Array<A>] =>
  array.length > 0

// The currently-running body's pull handle plus the child scope its
// resources live in; the scope is closed when the iteration ends.
interface CurrentBody<S, A, E, R> {
  readonly scope: Scope.Closeable
  readonly pull: Effect.Effect<ReadonlyArray<Event<A, S>>, E | Cause.Done<void>, R>
}

// Close a body's scope with the given exit, releasing its resources.
const closeBody = <S, A, E, R>(
  current: CurrentBody<S, A, E, R>,
  exit: Exit.Exit<unknown, unknown>,
) => Scope.close(current.scope, exit)
|
|
174
|
+
|
|
175
|
+
/**
|
|
176
|
+
* Walk a chunk of `Event<A, S>` until a terminal `Next` or `Stop` is found.
|
|
177
|
+
* Returns the unwrapped values seen so far and (optionally) the terminal
|
|
178
|
+
* event. Anything in the chunk after the terminal is discarded - its
|
|
179
|
+
* producing side effects may have run, but downstream never sees it.
|
|
180
|
+
*/
|
|
181
|
+
const partitionChunk = <A, S>(
|
|
182
|
+
chunk: ReadonlyArray<Event<A, S>>,
|
|
183
|
+
): { readonly values: Array<A>; readonly decision: Event<A, S> | undefined } => {
|
|
184
|
+
const values: Array<A> = []
|
|
185
|
+
for (let i = 0; i < chunk.length; i++) {
|
|
186
|
+
const event = chunk[i]!
|
|
187
|
+
if (event._tag === "Value") {
|
|
188
|
+
values.push(event.value)
|
|
189
|
+
} else {
|
|
190
|
+
return { values, decision: event }
|
|
191
|
+
}
|
|
192
|
+
}
|
|
193
|
+
return { values, decision: undefined }
|
|
194
|
+
}
|
|
195
|
+
|
|
196
|
+
// ---------------------------------------------------------------------------
|
|
197
|
+
// Public API
|
|
198
|
+
// ---------------------------------------------------------------------------
|
|
199
|
+
|
|
200
|
+
// A loop body: given the current state, produce this iteration's stream of
// loop events, either directly or effectfully (the Effect is unwrapped).
type LoopBody<S, A, E, R> = (
  state: S,
) => Stream.Stream<Event<A, S>, E, R> | Effect.Effect<Stream.Stream<Event<A, S>, E, R>, E, R>
|
|
203
|
+
|
|
204
|
+
/**
 * Drive a state-threaded loop body. Each iteration runs `body(state)` to get
 * a `Stream<Event<A, S>>`; values flow downstream, `next(s)` continues with
 * a new state, `stop` ends the loop. See the file header for the full
 * pull-based execution model.
 *
 * Dual: data-first `loop(initial, body)` and data-last `loop(body)(initial)`
 * (or `pipe(initial, loop(body))`) both work.
 */
export const loop: {
  <S, A, E, R>(body: LoopBody<S, A, E, R>): (initial: S) => Stream.Stream<A, E, R>
  <S, A, E, R>(initial: S, body: LoopBody<S, A, E, R>): Stream.Stream<A, E, R>
} = Function.dual(
  2,
  <S, A, E, R>(initial: S, body: LoopBody<S, A, E, R>): Stream.Stream<A, E, R> =>
    Stream.scoped(
      Stream.fromPull(
        Effect.gen(function* () {
          const outerScope = yield* Effect.scope
          // Mutable driver state, confined to this pull's lifetime.
          let state = initial
          let current: CurrentBody<S, A, E, R> | undefined
          let done = false

          // Close the given body's scope; clears `current` only when it is
          // still the active one (a newer body may have replaced it).
          const closeActive = (
            active: CurrentBody<S, A, E, R>,
            exit: Exit.Exit<unknown, unknown>,
          ) => {
            const isActive = current === active
            if (isActive) current = undefined
            // Scope.close is idempotent. Multiple paths can race to close the
            // active body during cancellation/failure, so closing twice is safe.
            return closeBody(active, exit)
          }

          // Ensure an in-flight body is closed when the outer stream ends
          // for any reason (completion, failure, interruption).
          yield* Scope.addFinalizerExit(outerScope, (exit) =>
            current === undefined ? Effect.void : closeActive(current, exit),
          )

          const pull = Effect.gen(function* () {
            while (true) {
              if (done) return yield* Cause.done()

              // No active body: start the next iteration lazily, in a child
              // scope forked from the outer one so its resources are owned
              // per-iteration.
              if (current === undefined) {
                const result = body(state)
                const stream = Effect.isEffect(result) ? Stream.unwrap(result) : result
                const bodyScope = yield* Scope.fork(outerScope)
                const bodyPull = yield* Channel.toPullScoped(
                  Stream.toChannel(stream),
                  bodyScope,
                ).pipe(Effect.onError((cause) => Scope.close(bodyScope, Exit.failCause(cause))))
                current = { scope: bodyScope, pull: bodyPull }
              }

              // Pull one chunk. `Done` means the body stream ended without a
              // terminal event (mapped to `undefined`); real failures close
              // the body scope with the failing cause before propagating.
              const active = current
              const chunk = yield* active.pull.pipe(
                Effect.catchIf(Cause.isDone, () =>
                  closeActive(active, Exit.void).pipe(
                    Effect.as(undefined as ReadonlyArray<Event<A, S>> | undefined),
                  ),
                ),
                Effect.onError((cause) => closeActive(active, Exit.failCause(cause))),
              )

              if (chunk === undefined) {
                done = true
                return yield* Cause.done()
              }

              const { values, decision } = partitionChunk(chunk)

              // A terminal event ends this iteration: close its scope, then
              // either finish the loop (`Stop`) or thread the new state.
              if (decision !== undefined) {
                yield* closeActive(active, Exit.void)
                if (decision._tag === "Stop") {
                  done = true
                } else if (decision._tag === "Next") {
                  state = decision.state
                }
              }

              // Emit the values seen so far if any. Chunks from a Stream pull
              // are non-empty, so when `decision === undefined` every event was
              // a `Value` and `values` is non-empty here. With a decision and
              // no preceding values, fall through to the next iteration.
              if (isNonEmpty(values)) return values
            }
          })

          return pull
        }),
      ),
    ),
)
|
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
import { Match } from "effect"
|
|
2
|
+
|
|
3
|
+
/**
 * Dispatch on the `type` discriminator of a tagged union. Equivalent to
 * `Match.discriminator("type")`, exposed as a named helper because the
 * `type` field is the framework's convention for `Item`, `TurnEvent`,
 * `ContentBlock`, and most provider wire types.
 */
export const matchType = Match.discriminator("type")
|
|
@@ -0,0 +1,87 @@
|
|
|
1
|
+
import { Clock, Duration, Effect, Option, Stream } from "effect"
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* Annotate every event in a stream with the elapsed `Duration` since the
|
|
5
|
+
* stream started consuming. The first event reports its time-from-start,
|
|
6
|
+
* which is also the conventional "time to first ____" metric.
|
|
7
|
+
*/
|
|
8
|
+
export const withElapsed = <A, E, R>(
|
|
9
|
+
self: Stream.Stream<A, E, R>,
|
|
10
|
+
): Stream.Stream<{ readonly value: A; readonly elapsed: Duration.Duration }, E, R> =>
|
|
11
|
+
Stream.unwrap(
|
|
12
|
+
Effect.map(Clock.currentTimeMillis, (start) =>
|
|
13
|
+
self.pipe(
|
|
14
|
+
Stream.mapEffect((value) =>
|
|
15
|
+
Effect.map(Clock.currentTimeMillis, (now) => ({
|
|
16
|
+
value,
|
|
17
|
+
elapsed: Duration.millis(now - start),
|
|
18
|
+
})),
|
|
19
|
+
),
|
|
20
|
+
),
|
|
21
|
+
),
|
|
22
|
+
)
|
|
23
|
+
|
|
24
|
+
/**
|
|
25
|
+
* Compute the elapsed time until the first event matching the predicate.
|
|
26
|
+
* Returns `Option.none()` if the stream completes without one.
|
|
27
|
+
*
|
|
28
|
+
* Consumes the stream. To track this *alongside* live consumption, use
|
|
29
|
+
* `Stream.broadcast` to fan the source out and run `timeToFirst` on one
|
|
30
|
+
* branch.
|
|
31
|
+
*/
|
|
32
|
+
export const timeToFirst =
|
|
33
|
+
<A>(predicate: (a: A) => boolean) =>
|
|
34
|
+
<E, R>(self: Stream.Stream<A, E, R>): Effect.Effect<Option.Option<Duration.Duration>, E, R> =>
|
|
35
|
+
withElapsed(self).pipe(
|
|
36
|
+
Stream.filter(({ value }) => predicate(value)),
|
|
37
|
+
Stream.runHead,
|
|
38
|
+
Effect.map(Option.map(({ elapsed }) => elapsed)),
|
|
39
|
+
)
|
|
40
|
+
|
|
41
|
+
/** A stream element annotated with running rate metrics by `withRate`. */
export interface RatePoint<A> {
  /** The original stream element, passed through unchanged. */
  readonly value: A
  /** Running sum of weights seen so far, including this element. */
  readonly total: number
  /** `total` per elapsed second; 0 until any time has elapsed. */
  readonly ratePerSecond: number
  /** Time since the stream started consuming. */
  readonly elapsed: Duration.Duration
}
|
|
47
|
+
|
|
48
|
+
/**
|
|
49
|
+
* Annotate every event with a running total and a rolling rate per second,
|
|
50
|
+
* computed from a user-supplied weight function.
|
|
51
|
+
*
|
|
52
|
+
* The weight is the unit you care about - bytes, tokens, error count, etc.
|
|
53
|
+
* For tokens-per-second on `TurnEvent`, pass:
|
|
54
|
+
*
|
|
55
|
+
* `(d) => d.type === "text_delta" ? countTokens(d.text) : 0`
|
|
56
|
+
*
|
|
57
|
+
* Use any tokenizer you like; the library does not ship one.
|
|
58
|
+
*/
|
|
59
|
+
export const withRate =
|
|
60
|
+
<A>(weight: (a: A) => number) =>
|
|
61
|
+
<E, R>(self: Stream.Stream<A, E, R>): Stream.Stream<RatePoint<A>, E, R> =>
|
|
62
|
+
Stream.unwrap(
|
|
63
|
+
Effect.map(Clock.currentTimeMillis, (start) =>
|
|
64
|
+
self.pipe(
|
|
65
|
+
Stream.mapAccumEffect(
|
|
66
|
+
() => ({ total: 0 }),
|
|
67
|
+
(acc, value) =>
|
|
68
|
+
Effect.map(Clock.currentTimeMillis, (now) => {
|
|
69
|
+
const total = acc.total + weight(value)
|
|
70
|
+
const elapsedMs = now - start
|
|
71
|
+
const ratePerSecond = elapsedMs > 0 ? (total / elapsedMs) * 1000 : 0
|
|
72
|
+
return [
|
|
73
|
+
{ total },
|
|
74
|
+
[
|
|
75
|
+
{
|
|
76
|
+
value,
|
|
77
|
+
total,
|
|
78
|
+
ratePerSecond,
|
|
79
|
+
elapsed: Duration.millis(elapsedMs),
|
|
80
|
+
} satisfies RatePoint<A>,
|
|
81
|
+
],
|
|
82
|
+
] as const
|
|
83
|
+
}),
|
|
84
|
+
),
|
|
85
|
+
),
|
|
86
|
+
),
|
|
87
|
+
)
|
|
@@ -0,0 +1,85 @@
|
|
|
1
|
+
import { Effect, Result, Schema, Stream } from "effect"
|
|
2
|
+
import { describe, expect, it } from "vitest"
|
|
3
|
+
import * as JSONL from "./JSONL.js"
|
|
4
|
+
|
|
5
|
+
// Shared test helpers: build a byte stream from string chunks and collect a
// stream's output into an array.
const enc = new TextEncoder()
const bytesOf = (...chunks: ReadonlyArray<string>) =>
  Stream.fromIterable(chunks.map((c) => enc.encode(c)))

const collect = <A, E>(s: Stream.Stream<A, E>) => Effect.runPromise(Stream.runCollect(s))

// Like `collect`, but captures failure as a Result instead of rejecting.
const collectResult = <A, E>(s: Stream.Stream<A, E>) =>
  Effect.runPromise(Effect.result(Stream.runCollect(s)))

// Minimal schema used by the parse / round-trip tests.
const Patch = Schema.Struct({ op: Schema.String, value: Schema.Number })
|
|
15
|
+
|
|
16
|
+
describe("JSONL.fromBytes", () => {
  it("emits one string per line", async () => {
    const out = await collect(JSONL.fromBytes(bytesOf("a\nb\nc\n")))
    expect(out).toEqual(["a", "b", "c"])
  })

  it("buffers lines across chunk boundaries", async () => {
    // "abc" and "def" each straddle a chunk boundary.
    const out = await collect(JSONL.fromBytes(bytesOf("ab", "c\nde", "f\n")))
    expect(out).toEqual(["abc", "def"])
  })

  it("flushes a trailing line without a final newline", async () => {
    const out = await collect(JSONL.fromBytes(bytesOf("a\nb")))
    expect(out).toEqual(["a", "b"])
  })

  it("ignores blank lines", async () => {
    const out = await collect(JSONL.fromBytes(bytesOf("a\n\n\nb\n")))
    expect(out).toEqual(["a", "b"])
  })
})
|
|
37
|
+
|
|
38
|
+
describe("JSONL.parse", () => {
  it("decodes well-formed JSON lines through a Schema", async () => {
    const out = await collect(
      bytesOf(`{"op":"add","value":1}\n{"op":"sub","value":2}\n`).pipe(
        JSONL.fromBytes,
        JSONL.parse(Patch),
      ),
    )
    expect(out).toEqual([
      { op: "add", value: 1 },
      { op: "sub", value: 2 },
    ])
  })

  it("fails with JsonParseError on malformed JSON", async () => {
    const result = await collectResult(
      bytesOf(`{"op":"add","value":1}\nNOT_JSON\n`).pipe(JSONL.fromBytes, JSONL.parse(Patch)),
    )
    expect(Result.isFailure(result)).toBe(true)
    if (Result.isFailure(result)) {
      // The error carries the offending raw line for diagnostics.
      expect(result.failure._tag).toBe("JsonParseError")
      expect(result.failure.line).toBe("NOT_JSON")
    }
  })

  it("fails with JsonParseError on schema mismatch", async () => {
    // Valid JSON, wrong shape: decode errors surface under the same tag.
    const result = await collectResult(
      bytesOf(`{"op":"add","value":"not a number"}\n`).pipe(JSONL.fromBytes, JSONL.parse(Patch)),
    )
    expect(Result.isFailure(result)).toBe(true)
    if (Result.isFailure(result)) {
      expect(result.failure._tag).toBe("JsonParseError")
    }
  })
})
|
|
73
|
+
|
|
74
|
+
describe("JSONL round-trip", () => {
  it("toBytes then fromBytes/parse recovers the values", async () => {
    const values = [
      { op: "a", value: 1 },
      { op: "b", value: 2 },
    ]
    const out = await collect(
      Stream.fromIterable(values).pipe(JSONL.toBytes(Patch), JSONL.fromBytes, JSONL.parse(Patch)),
    )
    expect(out).toEqual(values)
  })
})
|
|
@@ -0,0 +1,96 @@
|
|
|
1
|
+
import { Data, Effect, Schema, Stream } from "effect"
|
|
2
|
+
|
|
3
|
+
/**
 * Raised when a JSONL line fails `JSON.parse` or Schema decoding. `line` is
 * the offending raw line; `cause` is the underlying parse/decode error.
 */
export class JsonParseError extends Data.TaggedError("JsonParseError")<{
  readonly line: string
  readonly cause: unknown
}> {}
|
|
7
|
+
|
|
8
|
+
// ---------------------------------------------------------------------------
|
|
9
|
+
// Generic stream helpers (kept module-local; see SSE.ts for the same shape).
|
|
10
|
+
// ---------------------------------------------------------------------------
|
|
11
|
+
|
|
12
|
+
// Decode UTF-8 bytes to text, threading one stateful TextDecoder across
// chunks so multi-byte characters split over chunk boundaries decode
// correctly ({ stream: true } buffers partial code points).
const decodeText = <E, R>(self: Stream.Stream<Uint8Array, E, R>): Stream.Stream<string, E, R> =>
  self.pipe(
    Stream.mapAccum(
      (): TextDecoder => new TextDecoder("utf-8"),
      (decoder, chunk: Uint8Array) => [decoder, [decoder.decode(chunk, { stream: true })]] as const,
      {
        // End-of-stream: flush whatever partial bytes the decoder still holds.
        onHalt: (decoder: TextDecoder) => {
          const tail = decoder.decode()
          return tail.length > 0 ? [tail] : []
        },
      },
    ),
  )
|
|
25
|
+
|
|
26
|
+
const splitOn =
|
|
27
|
+
(separator: string) =>
|
|
28
|
+
<E, R>(self: Stream.Stream<string, E, R>): Stream.Stream<string, E, R> =>
|
|
29
|
+
self.pipe(
|
|
30
|
+
Stream.mapAccum(
|
|
31
|
+
(): string => "",
|
|
32
|
+
(buffer, chunk: string) => {
|
|
33
|
+
const parts = (buffer + chunk).split(separator)
|
|
34
|
+
const tail = parts[parts.length - 1] ?? ""
|
|
35
|
+
return [tail, parts.slice(0, -1)] as const
|
|
36
|
+
},
|
|
37
|
+
{ onHalt: (tail: string) => (tail.length > 0 ? [tail] : []) },
|
|
38
|
+
),
|
|
39
|
+
)
|
|
40
|
+
|
|
41
|
+
// ---------------------------------------------------------------------------
|
|
42
|
+
// Public API
|
|
43
|
+
// ---------------------------------------------------------------------------
|
|
44
|
+
|
|
45
|
+
/**
 * Decode a `Stream<Uint8Array>` into a `Stream<string>` of newline-delimited
 * lines. Empty lines are skipped. Buffers across chunk boundaries.
 */
export const fromBytes = <E, R>(
  self: Stream.Stream<Uint8Array, E, R>,
): Stream.Stream<string, E, R> =>
  self.pipe(
    decodeText,
    // Normalize CRLF by stripping every `\r` before splitting on `\n`.
    Stream.map((s) => s.replace(/\r/g, "")),
    splitOn("\n"),
    Stream.filter((line) => line.length > 0),
  )
|
|
58
|
+
|
|
59
|
+
/**
|
|
60
|
+
* Validate each JSONL line against a Schema. JSON parse errors and Schema
|
|
61
|
+
* decode errors both surface as a `JsonParseError` so callers can `catchTag`
|
|
62
|
+
* uniformly.
|
|
63
|
+
*/
|
|
64
|
+
export const parse =
|
|
65
|
+
<A, I>(schema: Schema.Codec<A, I>) =>
|
|
66
|
+
<E, R>(self: Stream.Stream<string, E, R>): Stream.Stream<A, JsonParseError | E, R> =>
|
|
67
|
+
self.pipe(
|
|
68
|
+
Stream.mapEffect((line) =>
|
|
69
|
+
Effect.try({
|
|
70
|
+
try: () => JSON.parse(line) as unknown,
|
|
71
|
+
catch: (cause) => new JsonParseError({ line, cause }),
|
|
72
|
+
}).pipe(
|
|
73
|
+
Effect.flatMap((value) =>
|
|
74
|
+
Schema.decodeUnknownEffect(schema)(value).pipe(
|
|
75
|
+
Effect.mapError((cause) => new JsonParseError({ line, cause })),
|
|
76
|
+
),
|
|
77
|
+
),
|
|
78
|
+
),
|
|
79
|
+
),
|
|
80
|
+
)
|
|
81
|
+
|
|
82
|
+
const encoder = new TextEncoder()
|
|
83
|
+
|
|
84
|
+
/**
|
|
85
|
+
* Serialize a stream of values to JSONL bytes. Encodes each value via
|
|
86
|
+
* `Schema.encodeUnknownSync`. Each line ends with `\n`.
|
|
87
|
+
*/
|
|
88
|
+
export const toBytes =
|
|
89
|
+
<A, I>(schema: Schema.Codec<A, I>) =>
|
|
90
|
+
<E, R>(self: Stream.Stream<A, E, R>): Stream.Stream<Uint8Array, E, R> =>
|
|
91
|
+
self.pipe(
|
|
92
|
+
Stream.map((value) => {
|
|
93
|
+
const encoded = Schema.encodeUnknownSync(schema)(value)
|
|
94
|
+
return encoder.encode(JSON.stringify(encoded) + "\n")
|
|
95
|
+
}),
|
|
96
|
+
)
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
import { Stream } from "effect"
|
|
2
|
+
|
|
3
|
+
/**
 * Split a string stream on `\n`, emitting one line per element. Buffers
 * partial chunks until a newline arrives, and flushes any non-newline
 * tail at stream end - so streams that don't terminate with `\n`
 * (typical of LLM token streams) still get their last line. Empty lines
 * are dropped, `\r` is stripped (handles `\r\n` endings).
 *
 * Intended use: feed text deltas from a model that has been prompted to
 * emit JSONL (or any other newline-delimited format), then parse /
 * validate each emitted line.
 */
export const lines = <E, R>(self: Stream.Stream<string, E, R>): Stream.Stream<string, E, R> =>
  // Appending a sentinel `\n` makes `linesStrict` emit the trailing line too.
  linesStrict(Stream.concat(self, Stream.make("\n")))
|
|
16
|
+
|
|
17
|
+
/**
|
|
18
|
+
* Like `lines`, but only emits lines that were terminated by `\n`. Any
|
|
19
|
+
* partial trailing content is dropped at stream end. Use when you want
|
|
20
|
+
* strict "complete-line-or-nothing" semantics.
|
|
21
|
+
*/
|
|
22
|
+
export const linesStrict = <E, R>(self: Stream.Stream<string, E, R>): Stream.Stream<string, E, R> =>
|
|
23
|
+
self.pipe(
|
|
24
|
+
Stream.mapAccum(
|
|
25
|
+
(): string => "",
|
|
26
|
+
(buffer, chunk: string) => {
|
|
27
|
+
const combined = buffer + chunk
|
|
28
|
+
const parts = combined.split("\n")
|
|
29
|
+
const tail = parts.pop() ?? ""
|
|
30
|
+
return [tail, parts.map((line) => line.replace(/\r/g, ""))] as const
|
|
31
|
+
},
|
|
32
|
+
),
|
|
33
|
+
Stream.filter((line) => line.trim().length > 0),
|
|
34
|
+
)
|
|
@@ -0,0 +1,72 @@
|
|
|
1
|
+
import { Effect, Stream } from "effect"
|
|
2
|
+
import { describe, expect, it } from "vitest"
|
|
3
|
+
import * as SSE from "./SSE.js"
|
|
4
|
+
|
|
5
|
+
// Test helpers: encode string chunks to a byte stream and collect output.
const enc = new TextEncoder()
const bytesOf = (...chunks: ReadonlyArray<string>) =>
  Stream.fromIterable(chunks.map((c) => enc.encode(c)))

const collect = <A, E>(s: Stream.Stream<A, E>) => Effect.runPromise(Stream.runCollect(s))
|
|
10
|
+
|
|
11
|
+
describe("SSE.fromBytes", () => {
  it("parses a single complete event", async () => {
    const out = await collect(SSE.fromBytes(bytesOf("event: foo\ndata: hello\n\n")))
    expect(out).toEqual([{ event: "foo", data: "hello" }])
  })

  it("joins multiple data lines with \\n", async () => {
    const out = await collect(SSE.fromBytes(bytesOf("data: line1\ndata: line2\ndata: line3\n\n")))
    expect(out).toEqual([{ data: "line1\nline2\nline3" }])
  })

  it("handles events split across chunk boundaries", async () => {
    const out = await collect(
      SSE.fromBytes(bytesOf("event: split\nda", "ta: hi\n", "\nevent: next\ndata: x\n\n")),
    )
    expect(out).toEqual([
      { event: "split", data: "hi" },
      { event: "next", data: "x" },
    ])
  })

  it("handles CRLF line endings", async () => {
    const out = await collect(SSE.fromBytes(bytesOf("event: a\r\ndata: b\r\n\r\n")))
    expect(out).toEqual([{ event: "a", data: "b" }])
  })

  it("preserves id and skips comment lines", async () => {
    // Lines starting with ":" are SSE comments and must not appear in output.
    const out = await collect(SSE.fromBytes(bytesOf(": ping\nid: 42\ndata: x\n\n")))
    expect(out).toEqual([{ id: "42", data: "x" }])
  })

  it("flushes a trailing event without a closing blank line", async () => {
    const out = await collect(SSE.fromBytes(bytesOf("data: tail")))
    expect(out).toEqual([{ data: "tail" }])
  })

  it("ignores empty blocks between events", async () => {
    const out = await collect(SSE.fromBytes(bytesOf("data: a\n\n\n\ndata: b\n\n")))
    expect(out).toEqual([{ data: "a" }, { data: "b" }])
  })

  it("handles a UTF-8 multi-byte char split across chunks", async () => {
    // "🦑" is 0xF0 0x9F 0xA6 0x91. Split between bytes 2 and 3.
    const squidBytes = enc.encode("data: 🦑\n\n")
    const a = squidBytes.slice(0, 8) // "data: " + first 2 bytes of squid
    const b = squidBytes.slice(8) // remaining squid bytes + "\n\n"
    const out = await collect(SSE.fromBytes(Stream.fromIterable([a, b])))
    expect(out).toEqual([{ data: "🦑" }])
  })
})
|
|
61
|
+
|
|
62
|
+
describe("SSE.toBytes round-trip", () => {
  it("re-parses what it serializes", async () => {
    // Covers named events, multi-line data, and id preservation.
    const events: ReadonlyArray<SSE.Event> = [
      { event: "a", data: "hello" },
      { data: "multi\nline" },
      { event: "b", id: "7", data: "x" },
    ]
    const reparsed = await collect(Stream.fromIterable(events).pipe(SSE.toBytes, SSE.fromBytes))
    expect(reparsed).toEqual(events)
  })
})
|