@effect-uai/core 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +43 -0
- package/dist/AiError-CqmYjXyx.d.mts +110 -0
- package/dist/AiError-CqmYjXyx.d.mts.map +1 -0
- package/dist/Items-D1C2686t.d.mts +372 -0
- package/dist/Items-D1C2686t.d.mts.map +1 -0
- package/dist/Loop-CzSJo1h8.d.mts +87 -0
- package/dist/Loop-CzSJo1h8.d.mts.map +1 -0
- package/dist/Outcome-C2JYknCu.d.mts +40 -0
- package/dist/Outcome-C2JYknCu.d.mts.map +1 -0
- package/dist/StructuredFormat-B5ueioNr.d.mts +88 -0
- package/dist/StructuredFormat-B5ueioNr.d.mts.map +1 -0
- package/dist/Tool-5wxOCuOh.d.mts +86 -0
- package/dist/Tool-5wxOCuOh.d.mts.map +1 -0
- package/dist/ToolEvent-B2N10hr3.d.mts +29 -0
- package/dist/ToolEvent-B2N10hr3.d.mts.map +1 -0
- package/dist/Turn-rlTfuHaQ.d.mts +211 -0
- package/dist/Turn-rlTfuHaQ.d.mts.map +1 -0
- package/dist/chunk-CfYAbeIz.mjs +13 -0
- package/dist/domain/AiError.d.mts +2 -0
- package/dist/domain/AiError.mjs +40 -0
- package/dist/domain/AiError.mjs.map +1 -0
- package/dist/domain/Items.d.mts +2 -0
- package/dist/domain/Items.mjs +238 -0
- package/dist/domain/Items.mjs.map +1 -0
- package/dist/domain/Turn.d.mts +2 -0
- package/dist/domain/Turn.mjs +82 -0
- package/dist/domain/Turn.mjs.map +1 -0
- package/dist/index.d.mts +14 -0
- package/dist/index.mjs +14 -0
- package/dist/language-model/LanguageModel.d.mts +60 -0
- package/dist/language-model/LanguageModel.d.mts.map +1 -0
- package/dist/language-model/LanguageModel.mjs +33 -0
- package/dist/language-model/LanguageModel.mjs.map +1 -0
- package/dist/loop/Loop.d.mts +2 -0
- package/dist/loop/Loop.mjs +172 -0
- package/dist/loop/Loop.mjs.map +1 -0
- package/dist/match/Match.d.mts +16 -0
- package/dist/match/Match.d.mts.map +1 -0
- package/dist/match/Match.mjs +15 -0
- package/dist/match/Match.mjs.map +1 -0
- package/dist/observability/Metrics.d.mts +45 -0
- package/dist/observability/Metrics.d.mts.map +1 -0
- package/dist/observability/Metrics.mjs +52 -0
- package/dist/observability/Metrics.mjs.map +1 -0
- package/dist/streaming/JSONL.d.mts +34 -0
- package/dist/streaming/JSONL.d.mts.map +1 -0
- package/dist/streaming/JSONL.mjs +51 -0
- package/dist/streaming/JSONL.mjs.map +1 -0
- package/dist/streaming/Lines.d.mts +27 -0
- package/dist/streaming/Lines.d.mts.map +1 -0
- package/dist/streaming/Lines.mjs +32 -0
- package/dist/streaming/Lines.mjs.map +1 -0
- package/dist/streaming/SSE.d.mts +31 -0
- package/dist/streaming/SSE.d.mts.map +1 -0
- package/dist/streaming/SSE.mjs +58 -0
- package/dist/streaming/SSE.mjs.map +1 -0
- package/dist/structured-format/StructuredFormat.d.mts +2 -0
- package/dist/structured-format/StructuredFormat.mjs +68 -0
- package/dist/structured-format/StructuredFormat.mjs.map +1 -0
- package/dist/testing/MockProvider.d.mts +48 -0
- package/dist/testing/MockProvider.d.mts.map +1 -0
- package/dist/testing/MockProvider.mjs +95 -0
- package/dist/testing/MockProvider.mjs.map +1 -0
- package/dist/tool/HistoryCheck.d.mts +24 -0
- package/dist/tool/HistoryCheck.d.mts.map +1 -0
- package/dist/tool/HistoryCheck.mjs +39 -0
- package/dist/tool/HistoryCheck.mjs.map +1 -0
- package/dist/tool/Outcome.d.mts +2 -0
- package/dist/tool/Outcome.mjs +45 -0
- package/dist/tool/Outcome.mjs.map +1 -0
- package/dist/tool/Resolvers.d.mts +44 -0
- package/dist/tool/Resolvers.d.mts.map +1 -0
- package/dist/tool/Resolvers.mjs +67 -0
- package/dist/tool/Resolvers.mjs.map +1 -0
- package/dist/tool/Tool.d.mts +2 -0
- package/dist/tool/Tool.mjs +79 -0
- package/dist/tool/Tool.mjs.map +1 -0
- package/dist/tool/ToolEvent.d.mts +2 -0
- package/dist/tool/ToolEvent.mjs +8 -0
- package/dist/tool/ToolEvent.mjs.map +1 -0
- package/dist/tool/Toolkit.d.mts +34 -0
- package/dist/tool/Toolkit.d.mts.map +1 -0
- package/dist/tool/Toolkit.mjs +105 -0
- package/dist/tool/Toolkit.mjs.map +1 -0
- package/package.json +127 -0
- package/src/domain/AiError.ts +93 -0
- package/src/domain/Items.ts +260 -0
- package/src/domain/Turn.ts +174 -0
- package/src/index.ts +13 -0
- package/src/language-model/LanguageModel.ts +73 -0
- package/src/loop/Loop.test.ts +412 -0
- package/src/loop/Loop.ts +295 -0
- package/src/match/Match.ts +9 -0
- package/src/observability/Metrics.ts +87 -0
- package/src/streaming/JSONL.test.ts +85 -0
- package/src/streaming/JSONL.ts +96 -0
- package/src/streaming/Lines.ts +34 -0
- package/src/streaming/SSE.test.ts +72 -0
- package/src/streaming/SSE.ts +114 -0
- package/src/structured-format/StructuredFormat.ts +160 -0
- package/src/testing/MockProvider.ts +161 -0
- package/src/tool/HistoryCheck.ts +49 -0
- package/src/tool/Outcome.ts +101 -0
- package/src/tool/Resolvers.test.ts +426 -0
- package/src/tool/Resolvers.ts +166 -0
- package/src/tool/Tool.ts +150 -0
- package/src/tool/ToolEvent.ts +37 -0
- package/src/tool/Toolkit.test.ts +45 -0
- package/src/tool/Toolkit.ts +228 -0
|
@@ -0,0 +1,172 @@
|
|
|
1
|
+
import { t as __exportAll } from "../chunk-CfYAbeIz.mjs";
|
|
2
|
+
import { IncompleteTurn } from "../domain/AiError.mjs";
|
|
3
|
+
import { isTurnComplete } from "../domain/Turn.mjs";
|
|
4
|
+
import { Cause, Channel, Data, Effect, Exit, Function, Option, Ref, Scope, Stream } from "effect";
|
|
5
|
+
//#region src/loop/Loop.ts
|
|
6
|
+
/**
|
|
7
|
+
* Pull-based `loop` for state-threaded sub-streams.
|
|
8
|
+
*
|
|
9
|
+
* Each iteration runs a body that returns a `Stream<Event<A, S>>`. The body
|
|
10
|
+
* emits values via `Loop.value(a)` and signals iteration control via
|
|
11
|
+
* `Loop.next(state)` (continue with new state) or `Loop.stop` (terminate).
|
|
12
|
+
* The loop unwraps `Value` events back to `A` for downstream consumers, so
|
|
13
|
+
* the resulting stream is a plain `Stream<A>`.
|
|
14
|
+
*
|
|
15
|
+
* The next body stream is only pulled when downstream pulls the outer
|
|
16
|
+
* stream - no producer fiber, no queue buffering. Cancellation, failures,
|
|
17
|
+
* scoped resources, and backpressure stay aligned with normal Stream
|
|
18
|
+
* semantics.
|
|
19
|
+
*
|
|
20
|
+
* Convention: a `Next` or `Stop` event is the terminal element of a body's
|
|
21
|
+
* iteration. Values emitted in the same chunk after one are discarded
|
|
22
|
+
* (their producing side effects may already have run). Prefer the
|
|
23
|
+
* `Loop.nextAfter` / `Loop.stopAfter` helpers to terminate cleanly.
|
|
24
|
+
*/
|
|
25
|
+
var Loop_exports = /* @__PURE__ */ __exportAll({
|
|
26
|
+
loop: () => loop,
|
|
27
|
+
next: () => next,
|
|
28
|
+
nextAfter: () => nextAfter,
|
|
29
|
+
nextAfterFold: () => nextAfterFold,
|
|
30
|
+
stop: () => stop,
|
|
31
|
+
stopAfter: () => stopAfter,
|
|
32
|
+
stopEvent: () => stopEvent,
|
|
33
|
+
streamUntilComplete: () => streamUntilComplete,
|
|
34
|
+
value: () => value
|
|
35
|
+
});
|
|
36
|
+
const Event = Data.taggedEnum();
|
|
37
|
+
/** Wrap a value so it flows through the loop to downstream consumers. */
|
|
38
|
+
const value = (a) => Event.Value({ value: a });
|
|
39
|
+
/** End the current iteration and continue with a new state. */
|
|
40
|
+
const next = (state) => Event.Next({ state });
|
|
41
|
+
/** The terminal `Stop` event. Use `stop` (the Stream) to end a loop body. */
|
|
42
|
+
const stopEvent = Event.Stop();
|
|
43
|
+
/**
|
|
44
|
+
* A single-element stream that ends the loop. Return this from a body when
|
|
45
|
+
* there's nothing else to emit; equivalent to `stopAfter(Stream.empty)` but
|
|
46
|
+
* named for the common case.
|
|
47
|
+
*/
|
|
48
|
+
const stop = Stream.succeed(stopEvent);
|
|
49
|
+
/**
|
|
50
|
+
* Pipe a raw `Stream<A>` into the loop's emit shape, then terminate the
|
|
51
|
+
* iteration with `next(state)`. Common shape for "stream this turn's
|
|
52
|
+
* deltas, then continue with updated history."
|
|
53
|
+
*/
|
|
54
|
+
const nextAfter = (stream, state) => Stream.concat(Stream.map(stream, value), Stream.fromIterable([next(state)]));
|
|
55
|
+
/**
|
|
56
|
+
* Pipe a raw `Stream<A>` into the loop's emit shape, then terminate the
|
|
57
|
+
* loop. Common shape for "stream this turn's deltas, then we're done."
|
|
58
|
+
*/
|
|
59
|
+
const stopAfter = (stream) => Stream.concat(Stream.map(stream, value), Stream.fromIterable([stopEvent]));
|
|
60
|
+
/**
|
|
61
|
+
* General `nextAfter` variant: drain `stream` to the consumer, fold elements
|
|
62
|
+
* into an accumulator, and at end-of-stream emit one `next(build(finalAcc))`.
|
|
63
|
+
*
|
|
64
|
+
* Subsumes `nextAfter` when state is constant (`reduce: (s, _) => s`,
|
|
65
|
+
* `build: (s) => s`). Used by `Toolkit.nextStateFrom` to collect tool
|
|
66
|
+
* results and build next state without exposing a Ref to recipes.
|
|
67
|
+
*/
|
|
68
|
+
const nextAfterFold = (stream, initial, reduce, build) => Stream.unwrap(Effect.gen(function* () {
|
|
69
|
+
const ref = yield* Ref.make(initial);
|
|
70
|
+
const tapped = stream.pipe(Stream.tap((a) => Ref.update(ref, (acc) => reduce(acc, a))), Stream.map(value));
|
|
71
|
+
const continuation = Stream.fromEffect(Ref.get(ref).pipe(Effect.map((acc) => next(build(acc)))));
|
|
72
|
+
return tapped.pipe(Stream.concat(continuation));
|
|
73
|
+
}));
|
|
74
|
+
/**
|
|
75
|
+
* Lift a provider's `Stream<TurnEvent>` into a loop body's `Stream<Event<TurnEvent | A, S>>`.
|
|
76
|
+
* Each delta passes through as `value(delta)` (including the terminal
|
|
77
|
+
* `turn_complete`, so the consumer sees turn boundaries naturally). Once
|
|
78
|
+
* the terminal arrives, `then(turn)` runs and its returned stream of loop
|
|
79
|
+
* events (typically tool outputs followed by `next(state)` or `stop`) is
|
|
80
|
+
* concatenated.
|
|
81
|
+
*
|
|
82
|
+
* Pre-pipe transforms (`Stream.tap` / `Stream.map` / `Stream.filter`) on
|
|
83
|
+
* the raw delta stream cover anything an `emit`-style callback would do.
|
|
84
|
+
*
|
|
85
|
+
* If the upstream ends without a `turn_complete`, the resulting stream
|
|
86
|
+
* fails with `AiError.IncompleteTurn`. Catch it via `Stream.catchTag` if
|
|
87
|
+
* you want to recover.
|
|
88
|
+
*/
|
|
89
|
+
const streamUntilComplete = (then) => (deltas) => Stream.unwrap(Effect.gen(function* () {
|
|
90
|
+
const turnRef = yield* Ref.make(Option.none());
|
|
91
|
+
const events = deltas.pipe(Stream.tap((delta) => isTurnComplete(delta) ? Ref.set(turnRef, Option.some(delta.turn)) : Effect.void), Stream.map(value));
|
|
92
|
+
const continuation = Stream.unwrap(Effect.gen(function* () {
|
|
93
|
+
const opt = yield* Ref.get(turnRef);
|
|
94
|
+
if (Option.isNone(opt)) return yield* Effect.fail(new IncompleteTurn({}));
|
|
95
|
+
return yield* then(opt.value);
|
|
96
|
+
}));
|
|
97
|
+
return Stream.concat(events, continuation);
|
|
98
|
+
}));
|
|
99
|
+
const isNonEmpty = (array) => array.length > 0;
|
|
100
|
+
const closeBody = (current, exit) => Scope.close(current.scope, exit);
|
|
101
|
+
/**
|
|
102
|
+
* Walk a chunk of `Event<A, S>` until a terminal `Next` or `Stop` is found.
|
|
103
|
+
* Returns the unwrapped values seen so far and (optionally) the terminal
|
|
104
|
+
* event. Anything in the chunk after the terminal is discarded - its
|
|
105
|
+
* producing side effects may have run, but downstream never sees it.
|
|
106
|
+
*/
|
|
107
|
+
const partitionChunk = (chunk) => {
|
|
108
|
+
const values = [];
|
|
109
|
+
for (let i = 0; i < chunk.length; i++) {
|
|
110
|
+
const event = chunk[i];
|
|
111
|
+
if (event._tag === "Value") values.push(event.value);
|
|
112
|
+
else return {
|
|
113
|
+
values,
|
|
114
|
+
decision: event
|
|
115
|
+
};
|
|
116
|
+
}
|
|
117
|
+
return {
|
|
118
|
+
values,
|
|
119
|
+
decision: void 0
|
|
120
|
+
};
|
|
121
|
+
};
|
|
122
|
+
/**
|
|
123
|
+
* Drive a state-threaded loop body. Each iteration runs `body(state)` to get
|
|
124
|
+
* a `Stream<Event<A, S>>`; values flow downstream, `next(s)` continues with
|
|
125
|
+
* a new state, `stop` ends the loop. See the file header for the full
|
|
126
|
+
* pull-based execution model.
|
|
127
|
+
*
|
|
128
|
+
* Dual: data-first `loop(initial, body)` and data-last `loop(body)(initial)`
|
|
129
|
+
* (or `pipe(initial, loop(body))`) both work.
|
|
130
|
+
*/
|
|
131
|
+
const loop = Function.dual(2, (initial, body) => Stream.scoped(Stream.fromPull(Effect.gen(function* () {
|
|
132
|
+
const outerScope = yield* Effect.scope;
|
|
133
|
+
let state = initial;
|
|
134
|
+
let current;
|
|
135
|
+
let done = false;
|
|
136
|
+
const closeActive = (active, exit) => {
|
|
137
|
+
if (current === active) current = void 0;
|
|
138
|
+
return closeBody(active, exit);
|
|
139
|
+
};
|
|
140
|
+
yield* Scope.addFinalizerExit(outerScope, (exit) => current === void 0 ? Effect.void : closeActive(current, exit));
|
|
141
|
+
return Effect.gen(function* () {
|
|
142
|
+
while (true) {
|
|
143
|
+
if (done) return yield* Cause.done();
|
|
144
|
+
if (current === void 0) {
|
|
145
|
+
const result = body(state);
|
|
146
|
+
const stream = Effect.isEffect(result) ? Stream.unwrap(result) : result;
|
|
147
|
+
const bodyScope = yield* Scope.fork(outerScope);
|
|
148
|
+
current = {
|
|
149
|
+
scope: bodyScope,
|
|
150
|
+
pull: yield* Channel.toPullScoped(Stream.toChannel(stream), bodyScope).pipe(Effect.onError((cause) => Scope.close(bodyScope, Exit.failCause(cause))))
|
|
151
|
+
};
|
|
152
|
+
}
|
|
153
|
+
const active = current;
|
|
154
|
+
const chunk = yield* active.pull.pipe(Effect.catchIf(Cause.isDone, () => closeActive(active, Exit.void).pipe(Effect.as(void 0))), Effect.onError((cause) => closeActive(active, Exit.failCause(cause))));
|
|
155
|
+
if (chunk === void 0) {
|
|
156
|
+
done = true;
|
|
157
|
+
return yield* Cause.done();
|
|
158
|
+
}
|
|
159
|
+
const { values, decision } = partitionChunk(chunk);
|
|
160
|
+
if (decision !== void 0) {
|
|
161
|
+
yield* closeActive(active, Exit.void);
|
|
162
|
+
if (decision._tag === "Stop") done = true;
|
|
163
|
+
else if (decision._tag === "Next") state = decision.state;
|
|
164
|
+
}
|
|
165
|
+
if (isNonEmpty(values)) return values;
|
|
166
|
+
}
|
|
167
|
+
});
|
|
168
|
+
}))));
|
|
169
|
+
//#endregion
|
|
170
|
+
export { loop, next, nextAfter, nextAfterFold, stop, stopAfter, stopEvent, streamUntilComplete, Loop_exports as t, value };
|
|
171
|
+
|
|
172
|
+
//# sourceMappingURL=Loop.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"Loop.mjs","names":[],"sources":["../../src/loop/Loop.ts"],"sourcesContent":["/**\n * Pull-based `loop` for state-threaded sub-streams.\n *\n * Each iteration runs a body that returns a `Stream<Event<A, S>>`. The body\n * emits values via `Loop.value(a)` and signals iteration control via\n * `Loop.next(state)` (continue with new state) or `Loop.stop` (terminate).\n * The loop unwraps `Value` events back to `A` for downstream consumers, so\n * the resulting stream is a plain `Stream<A>`.\n *\n * The next body stream is only pulled when downstream pulls the outer\n * stream - no producer fiber, no queue buffering. Cancellation, failures,\n * scoped resources, and backpressure stay aligned with normal Stream\n * semantics.\n *\n * Convention: a `Next` or `Stop` event is the terminal element of a body's\n * iteration. Values emitted in the same chunk after one are discarded\n * (their producing side effects may already have run). Prefer the\n * `Loop.nextAfter` / `Loop.stopAfter` helpers to terminate cleanly.\n */\nimport { Cause, Channel, Data, Effect, Exit, Function, Option, Ref, Scope, Stream } from \"effect\"\nimport { IncompleteTurn } from \"../domain/AiError.js\"\nimport { isTurnComplete, type Turn, type TurnEvent } from \"../domain/Turn.js\"\n\n// ---------------------------------------------------------------------------\n// Event type - the body's emit shape\n// ---------------------------------------------------------------------------\n\n/**\n * The tagged union a body emits per pull. `Value` carries a payload that\n * flows downstream. `Next` ends the current iteration and continues with a\n * new state. 
`Stop` ends the loop entirely.\n */\nexport type Event<A, S> = Data.TaggedEnum<{\n Value: { readonly value: A }\n Next: { readonly state: S }\n Stop: {}\n}>\n\ninterface EventDef extends Data.TaggedEnum.WithGenerics<2> {\n readonly taggedEnum: Event<this[\"A\"], this[\"B\"]>\n}\n\nconst Event = Data.taggedEnum<EventDef>()\n\n/** Wrap a value so it flows through the loop to downstream consumers. */\nexport const value = <A>(a: A): Event<A, never> => Event.Value({ value: a })\n\n/** End the current iteration and continue with a new state. */\nexport const next = <S>(state: S): Event<never, S> => Event.Next({ state })\n\n/** The terminal `Stop` event. Use `stop` (the Stream) to end a loop body. */\nexport const stopEvent: Event<never, never> = Event.Stop()\n\n/**\n * A single-element stream that ends the loop. Return this from a body when\n * there's nothing else to emit; equivalent to `stopAfter(Stream.empty)` but\n * named for the common case.\n */\nexport const stop: Stream.Stream<Event<never, never>> = Stream.succeed(stopEvent)\n\n/**\n * Pipe a raw `Stream<A>` into the loop's emit shape, then terminate the\n * iteration with `next(state)`. Common shape for \"stream this turn's\n * deltas, then continue with updated history.\"\n */\nexport const nextAfter = <S, A, E, R>(\n stream: Stream.Stream<A, E, R>,\n state: S,\n): Stream.Stream<Event<A, S>, E, R> =>\n Stream.concat(Stream.map(stream, value), Stream.fromIterable([next(state)]))\n\n/**\n * Pipe a raw `Stream<A>` into the loop's emit shape, then terminate the\n * loop. 
Common shape for \"stream this turn's deltas, then we're done.\"\n */\nexport const stopAfter = <A, E, R>(\n stream: Stream.Stream<A, E, R>,\n): Stream.Stream<Event<A, never>, E, R> =>\n Stream.concat(Stream.map(stream, value), Stream.fromIterable([stopEvent]))\n\n/**\n * General `nextAfter` variant: drain `stream` to the consumer, fold elements\n * into an accumulator, and at end-of-stream emit one `next(build(finalAcc))`.\n *\n * Subsumes `nextAfter` when state is constant (`reduce: (s, _) => s`,\n * `build: (s) => s`). Used by `Toolkit.nextStateFrom` to collect tool\n * results and build next state without exposing a Ref to recipes.\n */\nexport const nextAfterFold = <A, B, S, E, R>(\n stream: Stream.Stream<A, E, R>,\n initial: B,\n reduce: (acc: B, a: A) => B,\n build: (b: B) => S,\n): Stream.Stream<Event<A, S>, E, R> =>\n Stream.unwrap(\n Effect.gen(function* () {\n const ref = yield* Ref.make(initial)\n const tapped = stream.pipe(\n Stream.tap((a) => Ref.update(ref, (acc) => reduce(acc, a))),\n Stream.map(value),\n )\n const continuation = Stream.fromEffect(\n Ref.get(ref).pipe(Effect.map((acc) => next(build(acc)))),\n )\n return tapped.pipe(Stream.concat(continuation))\n }),\n )\n\n// ---------------------------------------------------------------------------\n// streamUntilComplete - turn-aware stream operator for loop bodies\n// ---------------------------------------------------------------------------\n\n/**\n * Lift a provider's `Stream<TurnEvent>` into a loop body's `Stream<Event<TurnEvent | A, S>>`.\n * Each delta passes through as `value(delta)` (including the terminal\n * `turn_complete`, so the consumer sees turn boundaries naturally). 
Once\n * the terminal arrives, `then(turn)` runs and its returned stream of loop\n * events (typically tool outputs followed by `next(state)` or `stop`) is\n * concatenated.\n *\n * Pre-pipe transforms (`Stream.tap` / `Stream.map` / `Stream.filter`) on\n * the raw delta stream cover anything an `emit`-style callback would do.\n *\n * If the upstream ends without a `turn_complete`, the resulting stream\n * fails with `AiError.IncompleteTurn`. Catch it via `Stream.catchTag` if\n * you want to recover.\n */\nexport const streamUntilComplete =\n <S, A, E2 = never, R2 = never>(\n then: (turn: Turn) => Effect.Effect<Stream.Stream<Event<A, S>, E2, R2>, E2, R2>,\n ) =>\n <E, R>(\n deltas: Stream.Stream<TurnEvent, E, R>,\n ): Stream.Stream<Event<TurnEvent | A, S>, E | E2 | IncompleteTurn, R | R2> =>\n Stream.unwrap(\n Effect.gen(function* () {\n const turnRef = yield* Ref.make<Option.Option<Turn>>(Option.none())\n\n const events: Stream.Stream<Event<TurnEvent, S>, E, R> = deltas.pipe(\n Stream.tap((delta) =>\n isTurnComplete(delta) ? 
Ref.set(turnRef, Option.some(delta.turn)) : Effect.void,\n ),\n Stream.map(value),\n )\n\n const continuation = Stream.unwrap(\n Effect.gen(function* () {\n const opt = yield* Ref.get(turnRef)\n if (Option.isNone(opt)) return yield* Effect.fail(new IncompleteTurn({}))\n return yield* then(opt.value)\n }),\n )\n\n return Stream.concat(events, continuation)\n }),\n )\n\n// ---------------------------------------------------------------------------\n// Internal helpers\n// ---------------------------------------------------------------------------\n\nconst isNonEmpty = <A>(array: ReadonlyArray<A>): array is readonly [A, ...Array<A>] =>\n array.length > 0\n\ninterface CurrentBody<S, A, E, R> {\n readonly scope: Scope.Closeable\n readonly pull: Effect.Effect<ReadonlyArray<Event<A, S>>, E | Cause.Done<void>, R>\n}\n\nconst closeBody = <S, A, E, R>(\n current: CurrentBody<S, A, E, R>,\n exit: Exit.Exit<unknown, unknown>,\n) => Scope.close(current.scope, exit)\n\n/**\n * Walk a chunk of `Event<A, S>` until a terminal `Next` or `Stop` is found.\n * Returns the unwrapped values seen so far and (optionally) the terminal\n * event. 
Anything in the chunk after the terminal is discarded - its\n * producing side effects may have run, but downstream never sees it.\n */\nconst partitionChunk = <A, S>(\n chunk: ReadonlyArray<Event<A, S>>,\n): { readonly values: Array<A>; readonly decision: Event<A, S> | undefined } => {\n const values: Array<A> = []\n for (let i = 0; i < chunk.length; i++) {\n const event = chunk[i]!\n if (event._tag === \"Value\") {\n values.push(event.value)\n } else {\n return { values, decision: event }\n }\n }\n return { values, decision: undefined }\n}\n\n// ---------------------------------------------------------------------------\n// Public API\n// ---------------------------------------------------------------------------\n\ntype LoopBody<S, A, E, R> = (\n state: S,\n) => Stream.Stream<Event<A, S>, E, R> | Effect.Effect<Stream.Stream<Event<A, S>, E, R>, E, R>\n\n/**\n * Drive a state-threaded loop body. Each iteration runs `body(state)` to get\n * a `Stream<Event<A, S>>`; values flow downstream, `next(s)` continues with\n * a new state, `stop` ends the loop. See the file header for the full\n * pull-based execution model.\n *\n * Dual: data-first `loop(initial, body)` and data-last `loop(body)(initial)`\n * (or `pipe(initial, loop(body))`) both work.\n */\nexport const loop: {\n <S, A, E, R>(body: LoopBody<S, A, E, R>): (initial: S) => Stream.Stream<A, E, R>\n <S, A, E, R>(initial: S, body: LoopBody<S, A, E, R>): Stream.Stream<A, E, R>\n} = Function.dual(\n 2,\n <S, A, E, R>(initial: S, body: LoopBody<S, A, E, R>): Stream.Stream<A, E, R> =>\n Stream.scoped(\n Stream.fromPull(\n Effect.gen(function* () {\n const outerScope = yield* Effect.scope\n let state = initial\n let current: CurrentBody<S, A, E, R> | undefined\n let done = false\n\n const closeActive = (\n active: CurrentBody<S, A, E, R>,\n exit: Exit.Exit<unknown, unknown>,\n ) => {\n const isActive = current === active\n if (isActive) current = undefined\n // Scope.close is idempotent. 
Multiple paths can race to close the\n // active body during cancellation/failure, so closing twice is safe.\n return closeBody(active, exit)\n }\n\n yield* Scope.addFinalizerExit(outerScope, (exit) =>\n current === undefined ? Effect.void : closeActive(current, exit),\n )\n\n const pull = Effect.gen(function* () {\n while (true) {\n if (done) return yield* Cause.done()\n\n if (current === undefined) {\n const result = body(state)\n const stream = Effect.isEffect(result) ? Stream.unwrap(result) : result\n const bodyScope = yield* Scope.fork(outerScope)\n const bodyPull = yield* Channel.toPullScoped(\n Stream.toChannel(stream),\n bodyScope,\n ).pipe(Effect.onError((cause) => Scope.close(bodyScope, Exit.failCause(cause))))\n current = { scope: bodyScope, pull: bodyPull }\n }\n\n const active = current\n const chunk = yield* active.pull.pipe(\n Effect.catchIf(Cause.isDone, () =>\n closeActive(active, Exit.void).pipe(\n Effect.as(undefined as ReadonlyArray<Event<A, S>> | undefined),\n ),\n ),\n Effect.onError((cause) => closeActive(active, Exit.failCause(cause))),\n )\n\n if (chunk === undefined) {\n done = true\n return yield* Cause.done()\n }\n\n const { values, decision } = partitionChunk(chunk)\n\n if (decision !== undefined) {\n yield* closeActive(active, Exit.void)\n if (decision._tag === \"Stop\") {\n done = true\n } else if (decision._tag === \"Next\") {\n state = decision.state\n }\n }\n\n // Emit the values seen so far if any. Chunks from a Stream pull\n // are non-empty, so when `decision === undefined` every event was\n // a `Value` and `values` is non-empty here. 
With a decision and\n // no preceding values, fall through to the next iteration.\n if (isNonEmpty(values)) return values\n }\n })\n\n return pull\n }),\n ),\n ),\n)\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA0CA,MAAM,QAAQ,KAAK,YAAsB;;AAGzC,MAAa,SAAY,MAA0B,MAAM,MAAM,EAAE,OAAO,GAAG,CAAC;;AAG5E,MAAa,QAAW,UAA8B,MAAM,KAAK,EAAE,OAAO,CAAC;;AAG3E,MAAa,YAAiC,MAAM,MAAM;;;;;;AAO1D,MAAa,OAA2C,OAAO,QAAQ,UAAU;;;;;;AAOjF,MAAa,aACX,QACA,UAEA,OAAO,OAAO,OAAO,IAAI,QAAQ,MAAM,EAAE,OAAO,aAAa,CAAC,KAAK,MAAM,CAAC,CAAC,CAAC;;;;;AAM9E,MAAa,aACX,WAEA,OAAO,OAAO,OAAO,IAAI,QAAQ,MAAM,EAAE,OAAO,aAAa,CAAC,UAAU,CAAC,CAAC;;;;;;;;;AAU5E,MAAa,iBACX,QACA,SACA,QACA,UAEA,OAAO,OACL,OAAO,IAAI,aAAa;CACtB,MAAM,MAAM,OAAO,IAAI,KAAK,QAAQ;CACpC,MAAM,SAAS,OAAO,KACpB,OAAO,KAAK,MAAM,IAAI,OAAO,MAAM,QAAQ,OAAO,KAAK,EAAE,CAAC,CAAC,EAC3D,OAAO,IAAI,MAAM,CAClB;CACD,MAAM,eAAe,OAAO,WAC1B,IAAI,IAAI,IAAI,CAAC,KAAK,OAAO,KAAK,QAAQ,KAAK,MAAM,IAAI,CAAC,CAAC,CAAC,CACzD;AACD,QAAO,OAAO,KAAK,OAAO,OAAO,aAAa,CAAC;EAC/C,CACH;;;;;;;;;;;;;;;;AAqBH,MAAa,uBAET,UAGA,WAEA,OAAO,OACL,OAAO,IAAI,aAAa;CACtB,MAAM,UAAU,OAAO,IAAI,KAA0B,OAAO,MAAM,CAAC;CAEnE,MAAM,SAAmD,OAAO,KAC9D,OAAO,KAAK,UACV,eAAe,MAAM,GAAG,IAAI,IAAI,SAAS,OAAO,KAAK,MAAM,KAAK,CAAC,GAAG,OAAO,KAC5E,EACD,OAAO,IAAI,MAAM,CAClB;CAED,MAAM,eAAe,OAAO,OAC1B,OAAO,IAAI,aAAa;EACtB,MAAM,MAAM,OAAO,IAAI,IAAI,QAAQ;AACnC,MAAI,OAAO,OAAO,IAAI,CAAE,QAAO,OAAO,OAAO,KAAK,IAAI,eAAe,EAAE,CAAC,CAAC;AACzE,SAAO,OAAO,KAAK,IAAI,MAAM;GAC7B,CACH;AAED,QAAO,OAAO,OAAO,QAAQ,aAAa;EAC1C,CACH;AAML,MAAM,cAAiB,UACrB,MAAM,SAAS;AAOjB,MAAM,aACJ,SACA,SACG,MAAM,MAAM,QAAQ,OAAO,KAAK;;;;;;;AAQrC,MAAM,kBACJ,UAC8E;CAC9E,MAAM,SAAmB,EAAE;AAC3B,MAAK,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;EACrC,MAAM,QAAQ,MAAM;AACpB,MAAI,MAAM,SAAS,QACjB,QAAO,KAAK,MAAM,MAAM;MAExB,QAAO;GAAE;GAAQ,UAAU;GAAO;;AAGtC,QAAO;EAAE;EAAQ,UAAU,KAAA;EAAW;;;;;;;;;;;AAoBxC,MAAa,OAGT,SAAS,KACX,IACa,SAAY,SACvB,OAAO,OACL,OAAO,SACL,OAAO,IAAI,aAAa;CACtB,MAAM,aAAa,OAAO,OAAO;CACjC,IAAI,QAAQ;CACZ,IAAI;CACJ,IAAI,OAAO;CAEX,MAAM,eACJ,QACA,SACG;AAEH,MADiB,YAAY,OACf,WAAU,KAAA;AAGx
B,SAAO,UAAU,QAAQ,KAAK;;AAGhC,QAAO,MAAM,iBAAiB,aAAa,SACzC,YAAY,KAAA,IAAY,OAAO,OAAO,YAAY,SAAS,KAAK,CACjE;AAmDD,QAjDa,OAAO,IAAI,aAAa;AACnC,SAAO,MAAM;AACX,OAAI,KAAM,QAAO,OAAO,MAAM,MAAM;AAEpC,OAAI,YAAY,KAAA,GAAW;IACzB,MAAM,SAAS,KAAK,MAAM;IAC1B,MAAM,SAAS,OAAO,SAAS,OAAO,GAAG,OAAO,OAAO,OAAO,GAAG;IACjE,MAAM,YAAY,OAAO,MAAM,KAAK,WAAW;AAK/C,cAAU;KAAE,OAAO;KAAW,MAAM,OAJZ,QAAQ,aAC9B,OAAO,UAAU,OAAO,EACxB,UACD,CAAC,KAAK,OAAO,SAAS,UAAU,MAAM,MAAM,WAAW,KAAK,UAAU,MAAM,CAAC,CAAC,CAAC;KAClC;;GAGhD,MAAM,SAAS;GACf,MAAM,QAAQ,OAAO,OAAO,KAAK,KAC/B,OAAO,QAAQ,MAAM,cACnB,YAAY,QAAQ,KAAK,KAAK,CAAC,KAC7B,OAAO,GAAG,KAAA,EAAoD,CAC/D,CACF,EACD,OAAO,SAAS,UAAU,YAAY,QAAQ,KAAK,UAAU,MAAM,CAAC,CAAC,CACtE;AAED,OAAI,UAAU,KAAA,GAAW;AACvB,WAAO;AACP,WAAO,OAAO,MAAM,MAAM;;GAG5B,MAAM,EAAE,QAAQ,aAAa,eAAe,MAAM;AAElD,OAAI,aAAa,KAAA,GAAW;AAC1B,WAAO,YAAY,QAAQ,KAAK,KAAK;AACrC,QAAI,SAAS,SAAS,OACpB,QAAO;aACE,SAAS,SAAS,OAC3B,SAAQ,SAAS;;AAQrB,OAAI,WAAW,OAAO,CAAE,QAAO;;GAIxB;EACX,CACH,CACF,CACJ"}
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
import { Match } from "effect";
|
|
2
|
+
|
|
3
|
+
//#region src/match/Match.d.ts
|
|
4
|
+
declare namespace Match_d_exports {
|
|
5
|
+
export { matchType };
|
|
6
|
+
}
|
|
7
|
+
/**
|
|
8
|
+
* Dispatch on the `type` discriminator of a tagged union. Equivalent to
|
|
9
|
+
* `Match.discriminator("type")`, exposed as a named helper because the
|
|
10
|
+
* `type` field is the framework's convention for `Item`, `TurnEvent`,
|
|
11
|
+
* `ContentBlock`, and most provider wire types.
|
|
12
|
+
*/
|
|
13
|
+
declare const matchType: <R, P extends Match.Types.Tags<"type", R> & string, Ret, Fn extends (_: Extract<R, Record<"type", P>>) => Ret>(...pattern: [first: P, ...values: P[], f: Fn]) => <I, F, A, Pr>(self: Match.Matcher<I, F, R, A, Pr, Ret>) => Match.Matcher<I, Match.Types.AddWithout<F, Extract<R, Record<"type", P>>>, Match.Types.ApplyFilters<I, Match.Types.AddWithout<F, Extract<R, Record<"type", P>>>>, A | ReturnType<Fn>, Pr, Ret>;
|
|
14
|
+
//#endregion
|
|
15
|
+
export { matchType, Match_d_exports as t };
|
|
16
|
+
//# sourceMappingURL=Match.d.mts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"Match.d.mts","names":[],"sources":["../../src/match/Match.ts"],"mappings":";;;;;;;;;;AAQA;;cAAa,SAAA,gBAAS,KAAA,CAAA,KAAA,CAAA,IAAA,SAAA,CAAA,6BAAA,CAAA,EAAA,OAAA,CAAA,CAAA,EAAA,MAAA,SAAA,CAAA,OAAA,GAAA,KAAA,OAAA,GAAA,KAAA,EAAA,CAAA,KAAA,MAAA,EAAA,CAAA,IAAA,CAAA,EAAA,EAAA,oBAAA,IAAA,EAAA,KAAA,CAAA,OAAA,CAAA,CAAA,EAAA,CAAA,EAAA,CAAA,EAAA,CAAA,EAAA,EAAA,EAAA,GAAA,MAAA,KAAA,CAAA,OAAA,CAAA,CAAA,EAAA,KAAA,CAAA,KAAA,CAAA,UAAA,CAAA,CAAA,EAAA,OAAA,CAAA,CAAA,EAAA,MAAA,SAAA,CAAA,KAAA,KAAA,CAAA,KAAA,CAAA,YAAA,CAAA,CAAA,EAAA,KAAA,CAAA,KAAA,CAAA,UAAA,CAAA,CAAA,EAAA,OAAA,CAAA,CAAA,EAAA,MAAA,SAAA,CAAA,MAAA,CAAA,GAAA,UAAA,CAAA,EAAA,GAAA,EAAA,EAAA,GAAA"}
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
import { t as __exportAll } from "../chunk-CfYAbeIz.mjs";
|
|
2
|
+
import { Match } from "effect";
|
|
3
|
+
//#region src/match/Match.ts
|
|
4
|
+
var Match_exports = /* @__PURE__ */ __exportAll({ matchType: () => matchType });
|
|
5
|
+
/**
|
|
6
|
+
* Dispatch on the `type` discriminator of a tagged union. Equivalent to
|
|
7
|
+
* `Match.discriminator("type")`, exposed as a named helper because the
|
|
8
|
+
* `type` field is the framework's convention for `Item`, `TurnEvent`,
|
|
9
|
+
* `ContentBlock`, and most provider wire types.
|
|
10
|
+
*/
|
|
11
|
+
const matchType = Match.discriminator("type");
|
|
12
|
+
//#endregion
|
|
13
|
+
export { matchType, Match_exports as t };
|
|
14
|
+
|
|
15
|
+
//# sourceMappingURL=Match.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"Match.mjs","names":[],"sources":["../../src/match/Match.ts"],"sourcesContent":["import { Match } from \"effect\"\n\n/**\n * Dispatch on the `type` discriminator of a tagged union. Equivalent to\n * `Match.discriminator(\"type\")`, exposed as a named helper because the\n * `type` field is the framework's convention for `Item`, `TurnEvent`,\n * `ContentBlock`, and most provider wire types.\n */\nexport const matchType = Match.discriminator(\"type\")\n"],"mappings":";;;;;;;;;;AAQA,MAAa,YAAY,MAAM,cAAc,OAAO"}
|
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
import { Duration, Effect, Option, Stream } from "effect";
|
|
2
|
+
|
|
3
|
+
//#region src/observability/Metrics.d.ts
|
|
4
|
+
declare namespace Metrics_d_exports {
  export { RatePoint, timeToFirst, withElapsed, withRate };
}
/**
 * Annotate every event in a stream with the elapsed `Duration` since the
 * stream started consuming. The first event reports its time-from-start,
 * which is also the conventional "time to first ____" metric.
 */
declare const withElapsed: <A, E, R>(self: Stream.Stream<A, E, R>) => Stream.Stream<{
  readonly value: A;
  readonly elapsed: Duration.Duration;
}, E, R>;
/**
 * Compute the elapsed time until the first event matching the predicate.
 * Returns `Option.none()` if the stream completes without one.
 *
 * Consumes the stream. To track this *alongside* live consumption, use
 * `Stream.broadcast` to fan the source out and run `timeToFirst` on one
 * branch.
 */
declare const timeToFirst: <A>(predicate: (a: A) => boolean) => <E, R>(self: Stream.Stream<A, E, R>) => Effect.Effect<Option.Option<Duration.Duration>, E, R>;
/** One annotated sample emitted by `withRate`. */
interface RatePoint<A> {
  /** The original stream element. */
  readonly value: A;
  /** Cumulative weight observed so far, including this element. */
  readonly total: number;
  /** `total` scaled to per-second; 0 until any time has elapsed. */
  readonly ratePerSecond: number;
  /** Time since stream consumption started. */
  readonly elapsed: Duration.Duration;
}
/**
 * Annotate every event with a running total and a rolling rate per second,
 * computed from a user-supplied weight function.
 *
 * The weight is the unit you care about - bytes, tokens, error count, etc.
 * For tokens-per-second on `TurnEvent`, pass:
 *
 * `(d) => d.type === "text_delta" ? countTokens(d.text) : 0`
 *
 * Use any tokenizer you like; the library does not ship one.
 */
declare const withRate: <A>(weight: (a: A) => number) => <E, R>(self: Stream.Stream<A, E, R>) => Stream.Stream<RatePoint<A>, E, R>;
|
|
43
|
+
//#endregion
|
|
44
|
+
export { RatePoint, Metrics_d_exports as t, timeToFirst, withElapsed, withRate };
|
|
45
|
+
//# sourceMappingURL=Metrics.d.mts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"Metrics.d.mts","names":[],"sources":["../../src/observability/Metrics.ts"],"mappings":";;;;;;;;;;;cAOa,WAAA,YACX,IAAA,EAAM,MAAA,CAAO,MAAA,CAAO,CAAA,EAAG,CAAA,EAAG,CAAA,MACzB,MAAA,CAAO,MAAA;EAAA,SAAkB,KAAA,EAAO,CAAA;EAAA,SAAY,OAAA,EAAS,QAAA,CAAS,QAAA;AAAA,GAAY,CAAA,EAAG,CAAA;AAFhF;;;;;;;;AAAA,cAwBa,WAAA,MACP,SAAA,GAAY,CAAA,EAAG,CAAA,wBACZ,IAAA,EAAM,MAAA,CAAO,MAAA,CAAO,CAAA,EAAG,CAAA,EAAG,CAAA,MAAK,MAAA,CAAO,MAAA,CAAO,MAAA,CAAO,MAAA,CAAO,QAAA,CAAS,QAAA,GAAW,CAAA,EAAG,CAAA;AAAA,UAO1E,SAAA;EAAA,SACN,KAAA,EAAO,CAAA;EAAA,SACP,KAAA;EAAA,SACA,aAAA;EAAA,SACA,OAAA,EAAS,QAAA,CAAS,QAAA;AAAA;;;;;;;;;;;;cAchB,QAAA,MACP,MAAA,GAAS,CAAA,EAAG,CAAA,uBACT,IAAA,EAAM,MAAA,CAAO,MAAA,CAAO,CAAA,EAAG,CAAA,EAAG,CAAA,MAAK,MAAA,CAAO,MAAA,CAAO,SAAA,CAAU,CAAA,GAAI,CAAA,EAAG,CAAA"}
|
|
@@ -0,0 +1,52 @@
|
|
|
1
|
+
import { t as __exportAll } from "../chunk-CfYAbeIz.mjs";
|
|
2
|
+
import { Clock, Duration, Effect, Option, Stream } from "effect";
|
|
3
|
+
//#region src/observability/Metrics.ts
|
|
4
|
+
// Namespace object backing `import * as Metrics` style access.
// NOTE(review): `__exportAll` comes from chunk-CfYAbeIz.mjs (not visible here);
// presumably it installs lazy getters per key — confirm in that chunk.
var Metrics_exports = /* @__PURE__ */ __exportAll({
  timeToFirst: () => timeToFirst,
  withElapsed: () => withElapsed,
  withRate: () => withRate
});
|
|
9
|
+
/**
 * Annotate every element of a stream with the `Duration` elapsed since
 * consumption began. The first element's `elapsed` is therefore the
 * conventional "time to first ____" measurement.
 */
const withElapsed = (self) =>
  Stream.unwrap(
    Effect.map(Clock.currentTimeMillis, (startMillis) => {
      // Stamp each element with the wall-clock delta from stream start.
      const stamp = (value) =>
        Effect.map(Clock.currentTimeMillis, (nowMillis) => ({
          value,
          elapsed: Duration.millis(nowMillis - startMillis)
        }));
      return self.pipe(Stream.mapEffect(stamp));
    })
  );
|
|
18
|
+
/**
 * Measure the elapsed time until the first element satisfying `predicate`.
 * Yields `Option.none()` when the stream finishes without a match.
 *
 * This consumes the stream. To observe the metric *alongside* live
 * consumption, fan the source out with `Stream.broadcast` and run
 * `timeToFirst` on one branch.
 */
const timeToFirst = (predicate) => (self) =>
  withElapsed(self).pipe(
    Stream.filter((point) => predicate(point.value)),
    Stream.runHead,
    Effect.map(Option.map((point) => point.elapsed))
  );
|
|
27
|
+
/**
 * Annotate each element with a running total and a rolling per-second rate,
 * derived from a caller-supplied weight function.
 *
 * The weight is whatever unit you care about - bytes, tokens, error count,
 * etc. For tokens-per-second on `TurnEvent`, pass:
 *
 * `(d) => d.type === "text_delta" ? countTokens(d.text) : 0`
 *
 * Use any tokenizer you like; the library does not ship one.
 */
const withRate = (weight) => (self) =>
  Stream.unwrap(
    Effect.map(Clock.currentTimeMillis, (startMillis) => {
      // Accumulator carries only the running total; the emitted point is
      // recomputed from wall-clock time on every element.
      const step = (state, value) =>
        Effect.map(Clock.currentTimeMillis, (nowMillis) => {
          const runningTotal = state.total + weight(value);
          const elapsedMillis = nowMillis - startMillis;
          // Guard against division by zero before any time has passed.
          const perSecond = elapsedMillis > 0 ? (runningTotal / elapsedMillis) * 1000 : 0;
          const point = {
            value,
            total: runningTotal,
            ratePerSecond: perSecond,
            elapsed: Duration.millis(elapsedMillis)
          };
          return [{ total: runningTotal }, [point]];
        });
      return self.pipe(Stream.mapAccumEffect(() => ({ total: 0 }), step));
    })
  );
|
|
49
|
+
//#endregion
|
|
50
|
+
export { Metrics_exports as t, timeToFirst, withElapsed, withRate };
|
|
51
|
+
|
|
52
|
+
//# sourceMappingURL=Metrics.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"Metrics.mjs","names":[],"sources":["../../src/observability/Metrics.ts"],"sourcesContent":["import { Clock, Duration, Effect, Option, Stream } from \"effect\"\n\n/**\n * Annotate every event in a stream with the elapsed `Duration` since the\n * stream started consuming. The first event reports its time-from-start,\n * which is also the conventional \"time to first ____\" metric.\n */\nexport const withElapsed = <A, E, R>(\n self: Stream.Stream<A, E, R>,\n): Stream.Stream<{ readonly value: A; readonly elapsed: Duration.Duration }, E, R> =>\n Stream.unwrap(\n Effect.map(Clock.currentTimeMillis, (start) =>\n self.pipe(\n Stream.mapEffect((value) =>\n Effect.map(Clock.currentTimeMillis, (now) => ({\n value,\n elapsed: Duration.millis(now - start),\n })),\n ),\n ),\n ),\n )\n\n/**\n * Compute the elapsed time until the first event matching the predicate.\n * Returns `Option.none()` if the stream completes without one.\n *\n * Consumes the stream. To track this *alongside* live consumption, use\n * `Stream.broadcast` to fan the source out and run `timeToFirst` on one\n * branch.\n */\nexport const timeToFirst =\n <A>(predicate: (a: A) => boolean) =>\n <E, R>(self: Stream.Stream<A, E, R>): Effect.Effect<Option.Option<Duration.Duration>, E, R> =>\n withElapsed(self).pipe(\n Stream.filter(({ value }) => predicate(value)),\n Stream.runHead,\n Effect.map(Option.map(({ elapsed }) => elapsed)),\n )\n\nexport interface RatePoint<A> {\n readonly value: A\n readonly total: number\n readonly ratePerSecond: number\n readonly elapsed: Duration.Duration\n}\n\n/**\n * Annotate every event with a running total and a rolling rate per second,\n * computed from a user-supplied weight function.\n *\n * The weight is the unit you care about - bytes, tokens, error count, etc.\n * For tokens-per-second on `TurnEvent`, pass:\n *\n * `(d) => d.type === \"text_delta\" ? 
countTokens(d.text) : 0`\n *\n * Use any tokenizer you like; the library does not ship one.\n */\nexport const withRate =\n <A>(weight: (a: A) => number) =>\n <E, R>(self: Stream.Stream<A, E, R>): Stream.Stream<RatePoint<A>, E, R> =>\n Stream.unwrap(\n Effect.map(Clock.currentTimeMillis, (start) =>\n self.pipe(\n Stream.mapAccumEffect(\n () => ({ total: 0 }),\n (acc, value) =>\n Effect.map(Clock.currentTimeMillis, (now) => {\n const total = acc.total + weight(value)\n const elapsedMs = now - start\n const ratePerSecond = elapsedMs > 0 ? (total / elapsedMs) * 1000 : 0\n return [\n { total },\n [\n {\n value,\n total,\n ratePerSecond,\n elapsed: Duration.millis(elapsedMs),\n } satisfies RatePoint<A>,\n ],\n ] as const\n }),\n ),\n ),\n ),\n )\n"],"mappings":";;;;;;;;;;;;;AAOA,MAAa,eACX,SAEA,OAAO,OACL,OAAO,IAAI,MAAM,oBAAoB,UACnC,KAAK,KACH,OAAO,WAAW,UAChB,OAAO,IAAI,MAAM,oBAAoB,SAAS;CAC5C;CACA,SAAS,SAAS,OAAO,MAAM,MAAM;CACtC,EAAE,CACJ,CACF,CACF,CACF;;;;;;;;;AAUH,MAAa,eACP,eACG,SACL,YAAY,KAAK,CAAC,KAChB,OAAO,QAAQ,EAAE,YAAY,UAAU,MAAM,CAAC,EAC9C,OAAO,SACP,OAAO,IAAI,OAAO,KAAK,EAAE,cAAc,QAAQ,CAAC,CACjD;;;;;;;;;;;;AAoBL,MAAa,YACP,YACG,SACL,OAAO,OACL,OAAO,IAAI,MAAM,oBAAoB,UACnC,KAAK,KACH,OAAO,sBACE,EAAE,OAAO,GAAG,IAClB,KAAK,UACJ,OAAO,IAAI,MAAM,oBAAoB,QAAQ;CAC3C,MAAM,QAAQ,IAAI,QAAQ,OAAO,MAAM;CACvC,MAAM,YAAY,MAAM;CACxB,MAAM,gBAAgB,YAAY,IAAK,QAAQ,YAAa,MAAO;AACnE,QAAO,CACL,EAAE,OAAO,EACT,CACE;EACE;EACA;EACA;EACA,SAAS,SAAS,OAAO,UAAU;EACpC,CACF,CACF;EACD,CACL,CACF,CACF,CACF"}
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
import { Schema, Stream } from "effect";
|
|
2
|
+
import * as _$effect_Types0 from "effect/Types";
|
|
3
|
+
import * as _$effect_Cause0 from "effect/Cause";
|
|
4
|
+
|
|
5
|
+
//#region src/streaming/JSONL.d.ts
|
|
6
|
+
declare namespace JSONL_d_exports {
  export { JsonParseError, fromBytes, parse, toBytes };
}
// Generated constructor type for `Data.TaggedError("JsonParseError")`:
// pins `_tag` and makes the error a yieldable Effect failure.
declare const JsonParseError_base: new <A extends Record<string, any> = {}>(args: _$effect_Types0.VoidIfEmpty<{ readonly [P in keyof A as P extends "_tag" ? never : P]: A[P] }>) => _$effect_Cause0.YieldableError & {
  readonly _tag: "JsonParseError";
} & Readonly<A>;
/** Raised when a JSONL line fails `JSON.parse` or Schema decoding. */
declare class JsonParseError extends JsonParseError_base<{
  /** The offending input line, verbatim. */
  readonly line: string;
  /** The underlying JSON or Schema error. */
  readonly cause: unknown;
}> {}
/**
 * Decode a `Stream<Uint8Array>` into a `Stream<string>` of newline-delimited
 * lines. Empty lines are skipped. Buffers across chunk boundaries.
 */
declare const fromBytes: <E, R>(self: Stream.Stream<Uint8Array, E, R>) => Stream.Stream<string, E, R>;
/**
 * Validate each JSONL line against a Schema. JSON parse errors and Schema
 * decode errors both surface as a `JsonParseError` so callers can `catchTag`
 * uniformly.
 */
declare const parse: <A, I>(schema: Schema.Codec<A, I>) => <E, R>(self: Stream.Stream<string, E, R>) => Stream.Stream<A, JsonParseError | E, R>;
/**
 * Serialize a stream of values to JSONL bytes. Encodes each value via
 * `Schema.encodeUnknownSync`. Each line ends with `\n`.
 */
declare const toBytes: <A, I>(schema: Schema.Codec<A, I>) => <E, R>(self: Stream.Stream<A, E, R>) => Stream.Stream<Uint8Array, E, R>;
|
|
32
|
+
//#endregion
|
|
33
|
+
export { JsonParseError, fromBytes, parse, JSONL_d_exports as t, toBytes };
|
|
34
|
+
//# sourceMappingURL=JSONL.d.mts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"JSONL.d.mts","names":[],"sources":["../../src/streaming/JSONL.ts"],"mappings":";;;;;;;;cAAqD,mBAAA;;;cAExC,cAAA,SAAuB,mBAAA;EAAA,SACzB,IAAA;EAAA,SACA,KAAA;AAAA;;;;AAJ0C;cAgDxC,SAAA,SACX,IAAA,EAAM,MAAA,CAAO,MAAA,CAAO,UAAA,EAAY,CAAA,EAAG,CAAA,MAClC,MAAA,CAAO,MAAA,SAAe,CAAA,EAAG,CAAA;;;;;;cAaf,KAAA,SACJ,MAAA,EAAQ,MAAA,CAAO,KAAA,CAAM,CAAA,EAAG,CAAA,aACxB,IAAA,EAAM,MAAA,CAAO,MAAA,SAAe,CAAA,EAAG,CAAA,MAAK,MAAA,CAAO,MAAA,CAAO,CAAA,EAAG,cAAA,GAAiB,CAAA,EAAG,CAAA;;;;;cAsBrE,OAAA,SACJ,MAAA,EAAQ,MAAA,CAAO,KAAA,CAAM,CAAA,EAAG,CAAA,aACxB,IAAA,EAAM,MAAA,CAAO,MAAA,CAAO,CAAA,EAAG,CAAA,EAAG,CAAA,MAAK,MAAA,CAAO,MAAA,CAAO,UAAA,EAAY,CAAA,EAAG,CAAA"}
|
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
import { t as __exportAll } from "../chunk-CfYAbeIz.mjs";
|
|
2
|
+
import { Data, Effect, Schema, Stream } from "effect";
|
|
3
|
+
//#region src/streaming/JSONL.ts
|
|
4
|
+
// Namespace object backing `import * as JSONL` style access.
// NOTE(review): `__exportAll` comes from chunk-CfYAbeIz.mjs (not visible here);
// presumably it installs lazy getters per key — confirm in that chunk.
var JSONL_exports = /* @__PURE__ */ __exportAll({
  JsonParseError: () => JsonParseError,
  fromBytes: () => fromBytes,
  parse: () => parse,
  toBytes: () => toBytes
});
|
|
10
|
+
/** Tagged error for JSONL failures: carries the raw `line` and the `cause`. */
class JsonParseError extends Data.TaggedError("JsonParseError") {}
|
|
11
|
+
/** Decode byte chunks as UTF-8 text, buffering partial multi-byte sequences. */
const decodeText = (self) => {
  const makeDecoder = () => new TextDecoder("utf-8");
  // `stream: true` keeps incomplete code points buffered inside the decoder.
  const step = (decoder, chunk) => [decoder, [decoder.decode(chunk, { stream: true })]];
  // On stream end, flush whatever the decoder still holds.
  const flush = (decoder) => {
    const remainder = decoder.decode();
    return remainder.length === 0 ? [] : [remainder];
  };
  return self.pipe(Stream.mapAccum(makeDecoder, step, { onHalt: flush }));
};
|
|
15
|
+
/** Split a text stream on `separator`, buffering across chunk boundaries. */
const splitOn = (separator) => (self) =>
  self.pipe(
    Stream.mapAccum(
      () => "",
      (pending, incoming) => {
        const segments = (pending + incoming).split(separator);
        // The final segment may be incomplete; carry it into the next chunk.
        const carry = segments[segments.length - 1] ?? "";
        return [carry, segments.slice(0, -1)];
      },
      // Emit the leftover partial segment (if any) when the stream ends.
      { onHalt: (carry) => (carry.length === 0 ? [] : [carry]) }
    )
  );
|
|
19
|
+
/**
 * Decode a `Stream<Uint8Array>` into a `Stream<string>` of newline-delimited
 * lines. Blank lines are dropped; buffers across chunk boundaries.
 */
const fromBytes = (self) =>
  self.pipe(
    decodeText,
    // Strip carriage returns so CRLF input behaves like LF.
    Stream.map((text) => text.replace(/\r/g, "")),
    splitOn("\n"),
    Stream.filter((line) => line !== "")
  );
|
|
24
|
+
/**
 * Validate each JSONL line against a Schema. Both `JSON.parse` failures and
 * Schema decode failures surface as `JsonParseError`, so callers can
 * `catchTag` uniformly.
 */
const parse = (schema) => (self) => {
  // Build the schema decoder once rather than per line.
  const decode = Schema.decodeUnknownEffect(schema);
  const parseLine = (line) =>
    Effect.try({
      try: () => JSON.parse(line),
      catch: (cause) => new JsonParseError({
        line,
        cause
      })
    }).pipe(
      Effect.flatMap((json) =>
        decode(json).pipe(
          Effect.mapError((cause) => new JsonParseError({
            line,
            cause
          }))
        )
      )
    );
  return self.pipe(Stream.mapEffect(parseLine));
};
|
|
39
|
+
const encoder = new TextEncoder();
/**
 * Serialize a stream of values to JSONL bytes. Each value is encoded with
 * `Schema.encodeUnknownSync` and the resulting JSON line is terminated
 * with `\n`.
 */
const toBytes = (schema) => (self) => {
  const encode = Schema.encodeUnknownSync(schema);
  return self.pipe(
    Stream.map((value) => encoder.encode(`${JSON.stringify(encode(value))}\n`))
  );
};
|
|
48
|
+
//#endregion
|
|
49
|
+
export { JsonParseError, fromBytes, parse, JSONL_exports as t, toBytes };
|
|
50
|
+
|
|
51
|
+
//# sourceMappingURL=JSONL.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"JSONL.mjs","names":[],"sources":["../../src/streaming/JSONL.ts"],"sourcesContent":["import { Data, Effect, Schema, Stream } from \"effect\"\n\nexport class JsonParseError extends Data.TaggedError(\"JsonParseError\")<{\n readonly line: string\n readonly cause: unknown\n}> {}\n\n// ---------------------------------------------------------------------------\n// Generic stream helpers (kept module-local; see SSE.ts for the same shape).\n// ---------------------------------------------------------------------------\n\nconst decodeText = <E, R>(self: Stream.Stream<Uint8Array, E, R>): Stream.Stream<string, E, R> =>\n self.pipe(\n Stream.mapAccum(\n (): TextDecoder => new TextDecoder(\"utf-8\"),\n (decoder, chunk: Uint8Array) => [decoder, [decoder.decode(chunk, { stream: true })]] as const,\n {\n onHalt: (decoder: TextDecoder) => {\n const tail = decoder.decode()\n return tail.length > 0 ? [tail] : []\n },\n },\n ),\n )\n\nconst splitOn =\n (separator: string) =>\n <E, R>(self: Stream.Stream<string, E, R>): Stream.Stream<string, E, R> =>\n self.pipe(\n Stream.mapAccum(\n (): string => \"\",\n (buffer, chunk: string) => {\n const parts = (buffer + chunk).split(separator)\n const tail = parts[parts.length - 1] ?? \"\"\n return [tail, parts.slice(0, -1)] as const\n },\n { onHalt: (tail: string) => (tail.length > 0 ? [tail] : []) },\n ),\n )\n\n// ---------------------------------------------------------------------------\n// Public API\n// ---------------------------------------------------------------------------\n\n/**\n * Decode a `Stream<Uint8Array>` into a `Stream<string>` of newline-delimited\n * lines. Empty lines are skipped. 
Buffers across chunk boundaries.\n */\nexport const fromBytes = <E, R>(\n self: Stream.Stream<Uint8Array, E, R>,\n): Stream.Stream<string, E, R> =>\n self.pipe(\n decodeText,\n Stream.map((s) => s.replace(/\\r/g, \"\")),\n splitOn(\"\\n\"),\n Stream.filter((line) => line.length > 0),\n )\n\n/**\n * Validate each JSONL line against a Schema. JSON parse errors and Schema\n * decode errors both surface as a `JsonParseError` so callers can `catchTag`\n * uniformly.\n */\nexport const parse =\n <A, I>(schema: Schema.Codec<A, I>) =>\n <E, R>(self: Stream.Stream<string, E, R>): Stream.Stream<A, JsonParseError | E, R> =>\n self.pipe(\n Stream.mapEffect((line) =>\n Effect.try({\n try: () => JSON.parse(line) as unknown,\n catch: (cause) => new JsonParseError({ line, cause }),\n }).pipe(\n Effect.flatMap((value) =>\n Schema.decodeUnknownEffect(schema)(value).pipe(\n Effect.mapError((cause) => new JsonParseError({ line, cause })),\n ),\n ),\n ),\n ),\n )\n\nconst encoder = new TextEncoder()\n\n/**\n * Serialize a stream of values to JSONL bytes. Encodes each value via\n * `Schema.encodeUnknownSync`. 
Each line ends with `\\n`.\n */\nexport const toBytes =\n <A, I>(schema: Schema.Codec<A, I>) =>\n <E, R>(self: Stream.Stream<A, E, R>): Stream.Stream<Uint8Array, E, R> =>\n self.pipe(\n Stream.map((value) => {\n const encoded = Schema.encodeUnknownSync(schema)(value)\n return encoder.encode(JSON.stringify(encoded) + \"\\n\")\n }),\n )\n"],"mappings":";;;;;;;;;AAEA,IAAa,iBAAb,cAAoC,KAAK,YAAY,iBAAiB,CAGnE;AAMH,MAAM,cAAoB,SACxB,KAAK,KACH,OAAO,eACc,IAAI,YAAY,QAAQ,GAC1C,SAAS,UAAsB,CAAC,SAAS,CAAC,QAAQ,OAAO,OAAO,EAAE,QAAQ,MAAM,CAAC,CAAC,CAAC,EACpF,EACE,SAAS,YAAyB;CAChC,MAAM,OAAO,QAAQ,QAAQ;AAC7B,QAAO,KAAK,SAAS,IAAI,CAAC,KAAK,GAAG,EAAE;GAEvC,CACF,CACF;AAEH,MAAM,WACH,eACM,SACL,KAAK,KACH,OAAO,eACS,KACb,QAAQ,UAAkB;CACzB,MAAM,SAAS,SAAS,OAAO,MAAM,UAAU;AAE/C,QAAO,CADM,MAAM,MAAM,SAAS,MAAM,IAC1B,MAAM,MAAM,GAAG,GAAG,CAAC;GAEnC,EAAE,SAAS,SAAkB,KAAK,SAAS,IAAI,CAAC,KAAK,GAAG,EAAE,EAAG,CAC9D,CACF;;;;;AAUL,MAAa,aACX,SAEA,KAAK,KACH,YACA,OAAO,KAAK,MAAM,EAAE,QAAQ,OAAO,GAAG,CAAC,EACvC,QAAQ,KAAK,EACb,OAAO,QAAQ,SAAS,KAAK,SAAS,EAAE,CACzC;;;;;;AAOH,MAAa,SACJ,YACA,SACL,KAAK,KACH,OAAO,WAAW,SAChB,OAAO,IAAI;CACT,WAAW,KAAK,MAAM,KAAK;CAC3B,QAAQ,UAAU,IAAI,eAAe;EAAE;EAAM;EAAO,CAAC;CACtD,CAAC,CAAC,KACD,OAAO,SAAS,UACd,OAAO,oBAAoB,OAAO,CAAC,MAAM,CAAC,KACxC,OAAO,UAAU,UAAU,IAAI,eAAe;CAAE;CAAM;CAAO,CAAC,CAAC,CAChE,CACF,CACF,CACF,CACF;AAEL,MAAM,UAAU,IAAI,aAAa;;;;;AAMjC,MAAa,WACJ,YACA,SACL,KAAK,KACH,OAAO,KAAK,UAAU;CACpB,MAAM,UAAU,OAAO,kBAAkB,OAAO,CAAC,MAAM;AACvD,QAAO,QAAQ,OAAO,KAAK,UAAU,QAAQ,GAAG,KAAK;EACrD,CACH"}
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
import { Stream } from "effect";
|
|
2
|
+
|
|
3
|
+
//#region src/streaming/Lines.d.ts
|
|
4
|
+
/** Namespace export surface for `streaming/Lines`. */
declare namespace Lines_d_exports {
  export { lines, linesStrict };
}
/**
 * Split a string stream on `\n`, emitting one line per element. Buffers
 * partial chunks until a newline arrives, and flushes any non-newline
 * tail at stream end - so streams that don't terminate with `\n`
 * (typical of LLM token streams) still get their last line. Empty lines
 * are dropped, `\r` is stripped (handles `\r\n` endings).
 *
 * Intended use: feed text deltas from a model that has been prompted to
 * emit JSONL (or any other newline-delimited format), then parse /
 * validate each emitted line.
 */
declare const lines: <E, R>(self: Stream.Stream<string, E, R>) => Stream.Stream<string, E, R>;
/**
 * Like `lines`, but only emits lines that were terminated by `\n`. Any
 * partial trailing content is dropped at stream end. Use when you want
 * strict "complete-line-or-nothing" semantics.
 */
declare const linesStrict: <E, R>(self: Stream.Stream<string, E, R>) => Stream.Stream<string, E, R>;
|
|
25
|
+
//#endregion
|
|
26
|
+
export { lines, linesStrict, Lines_d_exports as t };
|
|
27
|
+
//# sourceMappingURL=Lines.d.mts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"Lines.d.mts","names":[],"sources":["../../src/streaming/Lines.ts"],"mappings":";;;;;;;;;;;;;AAaA;;;;cAAa,KAAA,SAAe,IAAA,EAAM,MAAA,CAAO,MAAA,SAAe,CAAA,EAAG,CAAA,MAAK,MAAA,CAAO,MAAA,SAAe,CAAA,EAAG,CAAA;;;;;;cAQ5E,WAAA,SAAqB,IAAA,EAAM,MAAA,CAAO,MAAA,SAAe,CAAA,EAAG,CAAA,MAAK,MAAA,CAAO,MAAA,SAAe,CAAA,EAAG,CAAA"}
|
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
import { t as __exportAll } from "../chunk-CfYAbeIz.mjs";
|
|
2
|
+
import { Stream } from "effect";
|
|
3
|
+
//#region src/streaming/Lines.ts
|
|
4
|
+
// Namespace object backing `import * as Lines` style access.
// NOTE(review): `__exportAll` comes from chunk-CfYAbeIz.mjs (not visible here);
// presumably it installs lazy getters per key — confirm in that chunk.
var Lines_exports = /* @__PURE__ */ __exportAll({
  lines: () => lines,
  linesStrict: () => linesStrict
});
|
|
8
|
+
/**
 * Split a string stream on `\n`, one line per element. Partial chunks are
 * buffered until a newline arrives, and a non-newline tail is still flushed
 * at stream end - so streams that don't terminate with `\n` (typical of LLM
 * token streams) keep their last line. Empty lines are dropped and `\r` is
 * stripped (handles `\r\n` endings).
 *
 * Intended use: feed text deltas from a model prompted to emit JSONL (or
 * any newline-delimited format), then parse / validate each emitted line.
 */
const lines = (self) => {
  // Appending a trailing newline forces the buffered tail out of `linesStrict`.
  const terminated = Stream.concat(self, Stream.make("\n"));
  return linesStrict(terminated);
};
|
|
20
|
+
/**
 * Like `lines`, but emits only lines terminated by `\n`; any partial trailing
 * content is discarded at stream end. Use for strict
 * "complete-line-or-nothing" semantics.
 */
const linesStrict = (self) =>
  self.pipe(
    Stream.mapAccum(
      () => "",
      (carry, chunk) => {
        const segments = (carry + chunk).split("\n");
        // The last segment has no terminating newline yet; carry it forward.
        const nextCarry = segments.pop() ?? "";
        const complete = segments.map((line) => line.replace(/\r/g, ""));
        return [nextCarry, complete];
      }
    ),
    // Drop blank / whitespace-only lines.
    Stream.filter((line) => line.trim() !== "")
  );
|
|
29
|
+
//#endregion
|
|
30
|
+
export { lines, linesStrict, Lines_exports as t };
|
|
31
|
+
|
|
32
|
+
//# sourceMappingURL=Lines.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"Lines.mjs","names":[],"sources":["../../src/streaming/Lines.ts"],"sourcesContent":["import { Stream } from \"effect\"\n\n/**\n * Split a string stream on `\\n`, emitting one line per element. Buffers\n * partial chunks until a newline arrives, and flushes any non-newline\n * tail at stream end - so streams that don't terminate with `\\n`\n * (typical of LLM token streams) still get their last line. Empty lines\n * are dropped, `\\r` is stripped (handles `\\r\\n` endings).\n *\n * Intended use: feed text deltas from a model that has been prompted to\n * emit JSONL (or any other newline-delimited format), then parse /\n * validate each emitted line.\n */\nexport const lines = <E, R>(self: Stream.Stream<string, E, R>): Stream.Stream<string, E, R> =>\n linesStrict(Stream.concat(self, Stream.make(\"\\n\")))\n\n/**\n * Like `lines`, but only emits lines that were terminated by `\\n`. Any\n * partial trailing content is dropped at stream end. Use when you want\n * strict \"complete-line-or-nothing\" semantics.\n */\nexport const linesStrict = <E, R>(self: Stream.Stream<string, E, R>): Stream.Stream<string, E, R> =>\n self.pipe(\n Stream.mapAccum(\n (): string => \"\",\n (buffer, chunk: string) => {\n const combined = buffer + chunk\n const parts = combined.split(\"\\n\")\n const tail = parts.pop() ?? \"\"\n return [tail, parts.map((line) => line.replace(/\\r/g, \"\"))] as const\n },\n ),\n Stream.filter((line) => line.trim().length > 0),\n )\n"],"mappings":";;;;;;;;;;;;;;;;;;AAaA,MAAa,SAAe,SAC1B,YAAY,OAAO,OAAO,MAAM,OAAO,KAAK,KAAK,CAAC,CAAC;;;;;;AAOrD,MAAa,eAAqB,SAChC,KAAK,KACH,OAAO,eACS,KACb,QAAQ,UAAkB;CAEzB,MAAM,SADW,SAAS,OACH,MAAM,KAAK;AAElC,QAAO,CADM,MAAM,KAAK,IAAI,IACd,MAAM,KAAK,SAAS,KAAK,QAAQ,OAAO,GAAG,CAAC,CAAC;EAE9D,EACD,OAAO,QAAQ,SAAS,KAAK,MAAM,CAAC,SAAS,EAAE,CAChD"}
|
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
import { Stream } from "effect";
|
|
2
|
+
|
|
3
|
+
//#region src/streaming/SSE.d.ts
|
|
4
|
+
/** Namespace export surface for `streaming/SSE`. */
declare namespace SSE_d_exports {
  export { Event, fromBytes, toBytes };
}
/**
 * One Server-Sent Event. Fields per the WHATWG spec:
 * - `event`: optional event name (default "message" on the wire)
 * - `data`: payload, with multiple `data:` lines joined by `\n`
 * - `id`: optional last-event id
 */
interface Event {
  /** Optional event name; "message" is the wire default when absent. */
  readonly event?: string;
  /** Payload; multiple `data:` lines are joined with `\n`. */
  readonly data: string;
  /** Optional last-event id. */
  readonly id?: string;
}
/**
 * Decode a `Stream<Uint8Array>` (e.g. an HTTP response body) into a
 * `Stream<SSE.Event>`. Handles partial UTF-8 sequences, CRLF/LF line
 * endings, and events split across chunk boundaries.
 */
declare const fromBytes: <E, R>(self: Stream.Stream<Uint8Array, E, R>) => Stream.Stream<Event, E, R>;
/**
 * Encode a `Stream<Event>` as `Stream<Uint8Array>` ready to send on an
 * HTTP response with `Content-Type: text/event-stream`.
 */
declare const toBytes: <E, R>(self: Stream.Stream<Event, E, R>) => Stream.Stream<Uint8Array, E, R>;
|
|
29
|
+
//#endregion
|
|
30
|
+
export { Event, fromBytes, SSE_d_exports as t, toBytes };
|
|
31
|
+
//# sourceMappingURL=SSE.d.mts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"SSE.d.mts","names":[],"sources":["../../src/streaming/SSE.ts"],"mappings":";;;;;;;;;;;;UAQiB,KAAA;EAAA,SACN,KAAA;EAAA,SACA,IAAA;EAAA,SACA,EAAA;AAAA;;;;;;cA4EE,SAAA,SACX,IAAA,EAAM,MAAA,CAAO,MAAA,CAAO,UAAA,EAAY,CAAA,EAAG,CAAA,MAClC,MAAA,CAAO,MAAA,CAAO,KAAA,EAAO,CAAA,EAAG,CAAA;AAF3B;;;;AAAA,cAyBa,OAAA,SAAiB,IAAA,EAAM,MAAA,CAAO,MAAA,CAAO,KAAA,EAAO,CAAA,EAAG,CAAA,MAAK,MAAA,CAAO,MAAA,CAAO,UAAA,EAAY,CAAA,EAAG,CAAA"}
|
|
@@ -0,0 +1,58 @@
|
|
|
1
|
+
import { t as __exportAll } from "../chunk-CfYAbeIz.mjs";
|
|
2
|
+
import { Stream } from "effect";
|
|
3
|
+
//#region src/streaming/SSE.ts
|
|
4
|
+
// Namespace object backing `import * as SSE` style access.
// NOTE(review): `__exportAll` comes from chunk-CfYAbeIz.mjs (not visible here);
// presumably it installs lazy getters per key — confirm in that chunk.
var SSE_exports = /* @__PURE__ */ __exportAll({
  fromBytes: () => fromBytes,
  toBytes: () => toBytes
});
|
|
8
|
+
/** Decode `Uint8Array` chunks as UTF-8, handling multi-byte boundaries. */
const decodeText = (self) =>
  self.pipe(
    Stream.mapAccum(
      () => new TextDecoder("utf-8"),
      (decoder, bytes) => {
        // `stream: true` buffers incomplete code points inside the decoder.
        const text = decoder.decode(bytes, { stream: true });
        return [decoder, [text]];
      },
      {
        // Flush whatever the decoder still holds when the stream ends.
        onHalt: (decoder) => {
          const leftover = decoder.decode();
          return leftover.length === 0 ? [] : [leftover];
        }
      }
    )
  );
|
|
13
|
+
/** Split a text stream on a separator, buffering across chunk boundaries. */
const splitOn = (separator) => (self) =>
  self.pipe(
    Stream.mapAccum(
      () => "",
      (pending, incoming) => {
        const segments = (pending + incoming).split(separator);
        // The final segment may be incomplete; carry it into the next chunk.
        const remainder = segments[segments.length - 1] ?? "";
        return [remainder, segments.slice(0, -1)];
      },
      { onHalt: (remainder) => (remainder.length === 0 ? [] : [remainder]) }
    )
  );
|
|
18
|
+
/**
 * Split one SSE line into `[field, value]`. A line with no colon is a field
 * name with an empty value; a single space after the colon is stripped, per
 * the WHATWG event-stream grammar.
 */
const parseField = (line) => {
  const colonIndex = line.indexOf(":");
  if (colonIndex < 0) return [line, ""];
  const field = line.slice(0, colonIndex);
  const rawValue = line.slice(colonIndex + 1);
  return [field, rawValue.startsWith(" ") ? rawValue.slice(1) : rawValue];
};
/**
 * Parse one blank-line-delimited SSE block into an event object, or `null`
 * when the block contains only blank lines and `:` comments.
 */
const parseBlock = (block) => {
  const fieldLines = block
    .split("\n")
    .filter((line) => line.length > 0 && !line.startsWith(":"));
  if (fieldLines.length === 0) return null;
  const fields = fieldLines.map(parseField);
  // Multiple data: lines are joined with newlines, per spec.
  const data = fields
    .filter(([name]) => name === "data")
    .map(([, value]) => value)
    .join("\n");
  const result = { data };
  const eventField = fields.find(([name]) => name === "event");
  if (eventField !== undefined) result.event = eventField[1];
  const idField = fields.find(([name]) => name === "id");
  if (idField !== undefined) result.id = idField[1];
  return result;
};
|
|
36
|
+
/**
 * Decode a `Stream<Uint8Array>` (e.g. an HTTP response body) into a
 * `Stream<SSE.Event>`. Handles partial UTF-8 sequences, CRLF/LF line
 * endings, and events split across chunk boundaries.
 */
const fromBytes = (self) =>
  self.pipe(
    decodeText,
    // Normalize CRLF to LF before splitting into event blocks.
    Stream.map((text) => text.replace(/\r/g, "")),
    splitOn("\n\n"),
    Stream.map(parseBlock),
    // `parseBlock` yields null for comment-only / empty blocks.
    Stream.filter((event) => event !== null)
  );
|
|
42
|
+
/** Render a single event in wire format, ending with the blank-line delimiter. */
const eventToString = (ev) => {
  const out = [];
  if (ev.event !== undefined) out.push(`event: ${ev.event}`);
  if (ev.id !== undefined) out.push(`id: ${ev.id}`);
  // A multi-line payload becomes one data: line per newline-separated piece.
  for (const dataLine of ev.data.split("\n")) out.push(`data: ${dataLine}`);
  return `${out.join("\n")}\n\n`;
};
|
|
49
|
+
const encoder = new TextEncoder();
/**
 * Encode a `Stream<Event>` as `Stream<Uint8Array>` ready to send on an
 * HTTP response with `Content-Type: text/event-stream`.
 */
const toBytes = (self) => self.pipe(Stream.map((event) => encoder.encode(eventToString(event))));
|
|
55
|
+
//#endregion
|
|
56
|
+
export { fromBytes, SSE_exports as t, toBytes };
|
|
57
|
+
|
|
58
|
+
//# sourceMappingURL=SSE.mjs.map
|