@langgraph-js/sdk 3.7.0 → 3.8.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45)
  1. package/README.md +29 -0
  2. package/dist/History.d.ts +115 -0
  3. package/dist/History.js +226 -0
  4. package/dist/LangGraphClient.d.ts +23 -2
  5. package/dist/LangGraphClient.js +118 -80
  6. package/dist/MessageProcessor.js +18 -24
  7. package/dist/SpendTime.js +4 -9
  8. package/dist/TestKit.d.ts +1 -1
  9. package/dist/TestKit.js +16 -15
  10. package/dist/ToolManager.js +4 -7
  11. package/dist/artifacts/index.js +1 -1
  12. package/dist/client/LanggraphServer.js +1 -1
  13. package/dist/client/LowJSServer.d.ts +3 -0
  14. package/dist/client/LowJSServer.js +80 -0
  15. package/dist/client/index.d.ts +2 -0
  16. package/dist/client/index.js +2 -0
  17. package/dist/client/utils/sse.d.ts +8 -0
  18. package/dist/client/utils/sse.js +151 -0
  19. package/dist/client/utils/stream.d.ts +15 -0
  20. package/dist/client/utils/stream.js +104 -0
  21. package/dist/index.d.ts +2 -0
  22. package/dist/index.js +2 -0
  23. package/dist/react/ChatContext.d.ts +31 -20
  24. package/dist/react/ChatContext.js +10 -4
  25. package/dist/tool/ToolUI.js +3 -2
  26. package/dist/tool/createTool.js +3 -6
  27. package/dist/tool/utils.js +3 -4
  28. package/dist/ui-store/createChatStore.d.ts +33 -66
  29. package/dist/ui-store/createChatStore.js +261 -247
  30. package/dist/vue/ChatContext.d.ts +41 -21
  31. package/dist/vue/ChatContext.js +8 -2
  32. package/package.json +3 -1
  33. package/src/History.ts +294 -0
  34. package/src/LangGraphClient.ts +98 -48
  35. package/src/client/LanggraphServer.ts +1 -2
  36. package/src/client/LowJSServer.ts +80 -0
  37. package/src/client/index.ts +2 -0
  38. package/src/client/utils/sse.ts +176 -0
  39. package/src/client/utils/stream.ts +114 -0
  40. package/src/index.ts +2 -0
  41. package/src/react/ChatContext.ts +25 -16
  42. package/src/ui-store/createChatStore.ts +310 -236
  43. package/src/vue/ChatContext.ts +12 -0
  44. package/test/TestKit.test.ts +10 -2
  45. package/tsconfig.json +1 -1
@@ -5,7 +5,7 @@ import { CallToolResult } from "./tool/createTool.js";
5
5
  import { type ILangGraphClient } from "@langgraph-js/pure-graph/dist/types.js";
6
6
  import { MessageProcessor } from "./MessageProcessor.js";
7
7
  import { revertChatTo, RevertChatToOptions } from "./time-travel/index.js";
8
-
8
+ import camelcaseKeys from "camelcase-keys";
9
9
  export type RenderMessage = Message & {
10
10
  /** 对于 AIMessage 来说是节点名称,对于工具节点来说是工具名称 */
11
11
  name?: string;
@@ -85,6 +85,8 @@ export interface LangGraphClientConfig {
85
85
  defaultHeaders?: Record<string, string | null | undefined>;
86
86
  /** 自定义客户端实现,如果不提供则使用官方 Client */
87
87
  client: ILangGraphClient<any>;
88
+ /** 是否使用 legacy 模式,默认 false */
89
+ legacyMode?: boolean;
88
90
  }
89
91
 
90
92
  // 定义事件数据类型
@@ -118,10 +120,13 @@ export class LangGraphClient<TStateType = unknown> extends EventEmitter<LangGrap
118
120
  stopController: AbortController | null = null;
119
121
  /** Message 处理器 */
120
122
  private messageProcessor: MessageProcessor;
121
-
123
+ private legacyMode: boolean;
124
+ /** 当前流式状态 */
125
+ private _status: "idle" | "busy" | "interrupted" | "error" = "idle";
122
126
  constructor(config: LangGraphClientConfig) {
123
127
  super();
124
128
  this.client = config.client;
129
+ this.legacyMode = config.legacyMode ?? false;
125
130
  this.messageProcessor = new MessageProcessor();
126
131
  }
127
132
 
@@ -139,6 +144,11 @@ export class LangGraphClient<TStateType = unknown> extends EventEmitter<LangGrap
139
144
  get runs(): ILangGraphClient["runs"] {
140
145
  return this.client.runs;
141
146
  }
147
+
148
+ /** 获取当前流式状态 */
149
+ get status() {
150
+ return this._status;
151
+ }
142
152
  private listAssistants() {
143
153
  return this.assistants.search({
144
154
  metadata: null,
@@ -208,10 +218,19 @@ export class LangGraphClient<TStateType = unknown> extends EventEmitter<LangGrap
208
218
  * @zh 列出所有的 Thread。
209
219
  * @en Lists all Threads.
210
220
  */
211
- async listThreads() {
221
+ async listThreads(
222
+ options: {
223
+ sortOrder?: "asc" | "desc";
224
+ sortBy?: "created_at" | "updated_at";
225
+ offset?: number;
226
+ limit?: number;
227
+ } = {}
228
+ ) {
212
229
  return this.threads.search({
213
- sortOrder: "desc",
214
- sortBy: "updated_at",
230
+ sortOrder: options.sortOrder || "desc",
231
+ sortBy: options.sortBy || "updated_at",
232
+ offset: options.offset || 0,
233
+ limit: options.limit || 10,
215
234
  });
216
235
  }
217
236
  async deleteThread(threadId: string) {
@@ -345,12 +364,27 @@ export class LangGraphClient<TStateType = unknown> extends EventEmitter<LangGrap
345
364
  content: input,
346
365
  } as HumanMessage,
347
366
  ];
367
+
368
+ const streamRecord: any[] = [];
369
+ this._status = "busy";
370
+ this.emit("start", {
371
+ event: "start",
372
+ });
348
373
  const createStreamResponse = async () => {
349
374
  if (_debug?.streamResponse) {
350
375
  return _debug.streamResponse;
351
376
  }
377
+ const onCallback = this.legacyMode
378
+ ? (chunk: any) => {
379
+ streamRecord.push(chunk);
380
+ this.processStreamChunk(chunk, command);
381
+ }
382
+ : undefined;
352
383
  if (joinRunId) {
353
- return this.runs.joinStream(this.currentThread!.thread_id, joinRunId);
384
+ return this.runs.joinStream(this.currentThread!.thread_id, joinRunId, {
385
+ /** @ts-ignore */
386
+ onCallback,
387
+ });
354
388
  }
355
389
 
356
390
  return this.runs.stream(this.currentThread!.thread_id, this.currentAssistant!.assistant_id, {
@@ -364,56 +398,22 @@ export class LangGraphClient<TStateType = unknown> extends EventEmitter<LangGrap
364
398
  streamMode: ["messages", "values"],
365
399
  streamSubgraphs: true,
366
400
  command,
401
+ /** @ts-ignore 为兼容不支持 AsyncIterableFunction 的环境*/
402
+ onCallback,
367
403
  });
368
404
  };
369
405
  const streamResponse = await createStreamResponse();
370
-
371
- const streamRecord: any[] = [];
372
- this.emit("start", {
373
- event: "start",
374
- });
375
-
376
- for await (const chunk of streamResponse) {
377
- streamRecord.push(chunk);
378
- if (chunk.event === "metadata") {
379
- this.currentRun = chunk.data;
380
- } else if (chunk.event === "error" || chunk.event === "Error" || chunk.event === "__stream_error__") {
381
- this.emit("error", chunk);
382
- } else if (chunk.event === "messages/metadata") {
383
- Object.assign(this.messagesMetadata, chunk.data);
384
- continue;
385
- } else if (chunk.event === "messages/partial" || chunk.event === "messages/complete") {
386
- for (const message of chunk.data) {
387
- this.messageProcessor.updateStreamingMessage(message);
388
- }
389
- this.emit("message", chunk);
390
- continue;
391
- } else if (chunk.event === "values") {
392
- const data = chunk.data as {
393
- __interrupt__?: InterruptData;
394
- messages: Message[];
395
- };
396
-
397
- if (data.__interrupt__) {
398
- this.humanInTheLoop = data.__interrupt__;
399
- } else if (data.messages) {
400
- const isResume = !!command?.resume;
401
- const isLongerThanLocal = data.messages.length >= this.messageProcessor.getGraphMessages().length;
402
- // resume 情况下,长度低于前端 message 的统统不接受
403
- if (!isResume || (isResume && isLongerThanLocal)) {
404
- this.messageProcessor.setGraphMessages(data.messages as RenderMessage[]);
405
- this.emit("value", chunk);
406
- }
407
- this.graphState = chunk.data;
408
- }
409
- continue;
410
- } else if (chunk.event.startsWith("values|")) {
411
- this.graphPosition = chunk.event.split("|")[1];
406
+ if (!this.legacyMode) {
407
+ // 正常的 JS 环境都可以执行,但是部分环境不支持 AsyncGeneratorFunction(比如 sb 的微信小程序)
408
+ for await (const chunk of streamResponse) {
409
+ streamRecord.push(chunk);
410
+ this.processStreamChunk(chunk, command);
412
411
  }
413
412
  }
414
413
  const data = await this.runFETool();
415
414
  if (data) streamRecord.push(...data);
416
415
  this.humanInTheLoop = null;
416
+ this._status = "idle";
417
417
  this.emit("done", {
418
418
  event: "done",
419
419
  });
@@ -436,6 +436,54 @@ export class LangGraphClient<TStateType = unknown> extends EventEmitter<LangGrap
436
436
  return position[position.length - 1];
437
437
  }
438
438
 
439
+ /**
440
+ * @zh 处理流式响应的单个 chunk。
441
+ * @en Processes a single chunk from the stream response.
442
+ * @returns 是否需要跳过后续处理 (continue)
443
+ */
444
+ private processStreamChunk(chunk: any, command?: Command): boolean {
445
+ if (chunk.event === "metadata") {
446
+ this.currentRun = chunk.data;
447
+ } else if (chunk.event === "error" || chunk.event === "Error" || chunk.event === "__stream_error__") {
448
+ this._status = "error";
449
+ this.emit("error", chunk);
450
+ } else if (chunk.event === "messages/metadata") {
451
+ Object.assign(this.messagesMetadata, chunk.data);
452
+ return true;
453
+ } else if (chunk.event === "messages/partial" || chunk.event === "messages/complete") {
454
+ for (const message of chunk.data) {
455
+ this.messageProcessor.updateStreamingMessage(message);
456
+ }
457
+ this.emit("message", chunk);
458
+ return true;
459
+ } else if (chunk.event === "values") {
460
+ const data = chunk.data as {
461
+ __interrupt__?: InterruptData;
462
+ messages: Message[];
463
+ };
464
+
465
+ if (data.__interrupt__) {
466
+ this._status = "interrupted";
467
+ this.humanInTheLoop = camelcaseKeys(data.__interrupt__, {
468
+ deep: true,
469
+ });
470
+ } else if (data.messages) {
471
+ const isResume = !!command?.resume;
472
+ const isLongerThanLocal = data.messages.length >= this.messageProcessor.getGraphMessages().length;
473
+ // resume 情况下,长度低于前端 message 的统统不接受
474
+ if (!isResume || (isResume && isLongerThanLocal)) {
475
+ this.messageProcessor.setGraphMessages(data.messages as RenderMessage[]);
476
+ this.emit("value", chunk);
477
+ }
478
+ this.graphState = chunk.data;
479
+ }
480
+ return true;
481
+ } else if (chunk.event.startsWith("values|")) {
482
+ this.graphPosition = chunk.event.split("|")[1];
483
+ }
484
+ return false;
485
+ }
486
+
439
487
  private runFETool() {
440
488
  const data = this.messageProcessor.getStreamingMessages(); // 需要保证不被清理
441
489
  const lastMessage = data[data.length - 1];
@@ -453,6 +501,7 @@ export class LangGraphClient<TStateType = unknown> extends EventEmitter<LangGrap
453
501
  // json 校验
454
502
  return this.callFETool(toolMessage, tool.args);
455
503
  });
504
+ this._status = "interrupted";
456
505
  this.currentThread!.status = "interrupted"; // 修复某些机制下,状态不为 interrupted 与后端有差异
457
506
  return Promise.all(result);
458
507
  }
@@ -517,6 +566,7 @@ export class LangGraphClient<TStateType = unknown> extends EventEmitter<LangGrap
517
566
  this.messageProcessor.clearStreamingMessages();
518
567
  this.currentRun = undefined;
519
568
  this.tools.clearWaiting();
569
+ this._status = "idle";
520
570
  this.emit("value", {
521
571
  event: "messages/partial",
522
572
  data: {
@@ -1,7 +1,6 @@
1
1
  import { LangGraphClientConfig } from "../LangGraphClient.js";
2
2
  import { type ILangGraphClient } from "@langgraph-js/pure-graph/dist/types.js";
3
-
3
+ import { Client } from "@langchain/langgraph-sdk";
4
4
  export const createLangGraphServerClient = async (config: LangGraphClientConfig): Promise<ILangGraphClient> => {
5
- const { Client } = await import("@langchain/langgraph-sdk");
6
5
  return new Client(config) as ILangGraphClient;
7
6
  };
@@ -0,0 +1,80 @@
1
+ import { BytesLineDecoder, SSEDecoder } from "./utils/sse.js";
2
+ import { LangGraphClientConfig } from "../LangGraphClient.js";
3
+ import { ILangGraphClient } from "@langgraph-js/pure-graph/dist/types.js";
4
+
5
+ const REGEX_RUN_METADATA = /(\/threads\/(?<thread_id>.+))?\/runs\/(?<run_id>.+)/;
6
+ function getRunMetadataFromResponse(response: Response) {
7
+ const contentLocation = response.headers.get("Content-Location");
8
+ if (!contentLocation) return void 0;
9
+ const match = REGEX_RUN_METADATA.exec(contentLocation);
10
+ if (!match?.groups?.run_id) return void 0;
11
+ return {
12
+ run_id: match.groups.run_id,
13
+ thread_id: match.groups.thread_id || void 0,
14
+ };
15
+ }
16
+ import { Client } from "@langchain/langgraph-sdk";
17
+
18
+ export const createLowerJSClient = (config: Omit<LangGraphClientConfig, "client">): ILangGraphClient => {
19
+ const client = new Client(config);
20
+ /** @ts-ignore */
21
+ client.runs.joinStream = async function (this: any, threadId: string | null, runId: string, options: any) {
22
+ const opts = typeof options === "object" && options != null && options instanceof AbortSignal ? { signal: options } : options;
23
+ let [url, init] = this.prepareFetchOptions(threadId != null ? `/threads/${threadId}/runs/${runId}/stream` : `/runs/${runId}/stream`, {
24
+ method: "GET",
25
+ timeoutMs: null,
26
+ signal: opts?.signal,
27
+ headers: opts?.lastEventId ? { "Last-Event-ID": opts.lastEventId } : void 0,
28
+ params: {
29
+ cancel_on_disconnect: opts?.cancelOnDisconnect ? "1" : "0",
30
+ stream_mode: opts?.streamMode,
31
+ },
32
+ });
33
+ if (this.onRequest != null) init = await this.onRequest(url, init);
34
+ const response = await this.asyncCaller.fetch(url, init);
35
+ const stream: ReadableStream = (response.body || new ReadableStream({ start: (ctrl) => ctrl.close() })).pipeThrough(BytesLineDecoder()).pipeThrough(SSEDecoder());
36
+ return stream.pipeTo(new WritableStream({ write: (chunk) => options.onCallback?.(chunk) }));
37
+ }.bind(client.runs);
38
+ /** @ts-ignore */
39
+ client.runs.stream = async function (this: any, threadId: string | null, assistantId: string, payload?: any) {
40
+ const json = {
41
+ input: payload?.input,
42
+ command: payload?.command,
43
+ config: payload?.config,
44
+ context: payload?.context,
45
+ metadata: payload?.metadata,
46
+ stream_mode: payload?.streamMode,
47
+ stream_subgraphs: payload?.streamSubgraphs,
48
+ stream_resumable: payload?.streamResumable,
49
+ feedback_keys: payload?.feedbackKeys,
50
+ assistant_id: assistantId,
51
+ interrupt_before: payload?.interruptBefore,
52
+ interrupt_after: payload?.interruptAfter,
53
+ checkpoint: payload?.checkpoint,
54
+ checkpoint_id: payload?.checkpointId,
55
+ webhook: payload?.webhook,
56
+ multitask_strategy: payload?.multitaskStrategy,
57
+ on_completion: payload?.onCompletion,
58
+ on_disconnect: payload?.onDisconnect,
59
+ after_seconds: payload?.afterSeconds,
60
+ if_not_exists: payload?.ifNotExists,
61
+ checkpoint_during: payload?.checkpointDuring,
62
+ durability: payload?.durability,
63
+ };
64
+ const endpoint = threadId == null ? `/runs/stream` : `/threads/${threadId}/runs/stream`;
65
+ let [url, init] = this.prepareFetchOptions(endpoint, {
66
+ method: "POST",
67
+ json,
68
+ timeoutMs: null,
69
+ signal: payload?.signal,
70
+ });
71
+ if (this.onRequest != null) init = await this.onRequest(url, init);
72
+ const response = await this.asyncCaller.fetch(url, init);
73
+ const runMetadata = getRunMetadataFromResponse(response);
74
+ if (runMetadata) payload?.onRunCreated?.(runMetadata);
75
+ const stream: ReadableStream = (response.body || new ReadableStream({ start: (ctrl) => ctrl.close() })).pipeThrough(BytesLineDecoder()).pipeThrough(SSEDecoder());
76
+
77
+ return stream.pipeTo(new WritableStream({ write: (chunk) => payload.onCallback?.(chunk) }));
78
+ }.bind(client.runs);
79
+ return client as ILangGraphClient;
80
+ };
@@ -0,0 +1,2 @@
1
+ export * from "./LanggraphServer.js";
2
+ export * from "./LowJSServer.js";
@@ -0,0 +1,176 @@
1
+ /** copied from https://github.com/langchain-ai/langgraphjs/tree/main/libs/sdk/src/utils */
2
+ const CR = "\r".charCodeAt(0);
3
+ const LF = "\n".charCodeAt(0);
4
+ const NULL = "\0".charCodeAt(0);
5
+ const COLON = ":".charCodeAt(0);
6
+ const SPACE = " ".charCodeAt(0);
7
+
8
+ const TRAILING_NEWLINE = [CR, LF];
9
+
10
+ export function BytesLineDecoder() {
11
+ let buffer: Uint8Array[] = [];
12
+ let trailingCr = false;
13
+
14
+ return new TransformStream<Uint8Array, Uint8Array>({
15
+ start() {
16
+ buffer = [];
17
+ trailingCr = false;
18
+ },
19
+
20
+ transform(chunk, controller) {
21
+ // See https://docs.python.org/3/glossary.html#term-universal-newlines
22
+ let text = chunk;
23
+
24
+ // Handle trailing CR from previous chunk
25
+ if (trailingCr) {
26
+ text = joinArrays([[CR], text]);
27
+ trailingCr = false;
28
+ }
29
+
30
+ // Check for trailing CR in current chunk
31
+ if (text.length > 0 && text.at(-1) === CR) {
32
+ trailingCr = true;
33
+ text = text.subarray(0, -1);
34
+ }
35
+
36
+ if (!text.length) return;
37
+ const trailingNewline = TRAILING_NEWLINE.includes(text.at(-1)!);
38
+
39
+ const lastIdx = text.length - 1;
40
+ const { lines } = text.reduce<{ lines: Uint8Array[]; from: number }>(
41
+ (acc, cur, idx) => {
42
+ if (acc.from > idx) return acc;
43
+
44
+ if (cur === CR || cur === LF) {
45
+ acc.lines.push(text.subarray(acc.from, idx));
46
+ if (cur === CR && text[idx + 1] === LF) {
47
+ acc.from = idx + 2;
48
+ } else {
49
+ acc.from = idx + 1;
50
+ }
51
+ }
52
+
53
+ if (idx === lastIdx && acc.from <= lastIdx) {
54
+ acc.lines.push(text.subarray(acc.from));
55
+ }
56
+
57
+ return acc;
58
+ },
59
+ { lines: [], from: 0 }
60
+ );
61
+
62
+ if (lines.length === 1 && !trailingNewline) {
63
+ buffer.push(lines[0]);
64
+ return;
65
+ }
66
+
67
+ if (buffer.length) {
68
+ // Include existing buffer in first line
69
+ buffer.push(lines[0]);
70
+ lines[0] = joinArrays(buffer);
71
+ buffer = [];
72
+ }
73
+
74
+ if (!trailingNewline) {
75
+ // If the last segment is not newline terminated,
76
+ // buffer it for the next chunk
77
+ if (lines.length) buffer = [lines.pop()!];
78
+ }
79
+
80
+ // Enqueue complete lines
81
+ for (const line of lines) {
82
+ controller.enqueue(line);
83
+ }
84
+ },
85
+
86
+ flush(controller) {
87
+ if (buffer.length) {
88
+ controller.enqueue(joinArrays(buffer));
89
+ }
90
+ },
91
+ });
92
+ }
93
+
94
+ interface StreamPart {
95
+ id: string | undefined;
96
+ event: string;
97
+ data: unknown;
98
+ }
99
+
100
+ export function SSEDecoder() {
101
+ let event = "";
102
+ let data: Uint8Array[] = [];
103
+ let lastEventId = "";
104
+ let retry: number | null = null;
105
+
106
+ const decoder = new TextDecoder();
107
+
108
+ return new TransformStream<Uint8Array, StreamPart>({
109
+ transform(chunk, controller) {
110
+ // Handle empty line case
111
+ if (!chunk.length) {
112
+ if (!event && !data.length && !lastEventId && retry == null) return;
113
+
114
+ const sse = {
115
+ id: lastEventId || undefined,
116
+ event,
117
+ data: data.length ? decodeArraysToJson(decoder, data) : null,
118
+ };
119
+
120
+ // NOTE: as per the SSE spec, do not reset lastEventId
121
+ event = "";
122
+ data = [];
123
+ retry = null;
124
+
125
+ controller.enqueue(sse);
126
+ return;
127
+ }
128
+
129
+ // Ignore comments
130
+ if (chunk[0] === COLON) return;
131
+
132
+ const sepIdx = chunk.indexOf(COLON);
133
+ if (sepIdx === -1) return;
134
+
135
+ const fieldName = decoder.decode(chunk.subarray(0, sepIdx));
136
+ let value = chunk.subarray(sepIdx + 1);
137
+ if (value[0] === SPACE) value = value.subarray(1);
138
+
139
+ if (fieldName === "event") {
140
+ event = decoder.decode(value);
141
+ } else if (fieldName === "data") {
142
+ data.push(value);
143
+ } else if (fieldName === "id") {
144
+ if (value.indexOf(NULL) === -1) lastEventId = decoder.decode(value);
145
+ } else if (fieldName === "retry") {
146
+ const retryNum = Number.parseInt(decoder.decode(value), 10);
147
+ if (!Number.isNaN(retryNum)) retry = retryNum;
148
+ }
149
+ },
150
+
151
+ flush(controller) {
152
+ if (event) {
153
+ controller.enqueue({
154
+ id: lastEventId || undefined,
155
+ event,
156
+ data: data.length ? decodeArraysToJson(decoder, data) : null,
157
+ });
158
+ }
159
+ },
160
+ });
161
+ }
162
+
163
+ function joinArrays(data: ArrayLike<number>[]) {
164
+ const totalLength = data.reduce((acc, curr) => acc + curr.length, 0);
165
+ const merged = new Uint8Array(totalLength);
166
+ let offset = 0;
167
+ for (const c of data) {
168
+ merged.set(c, offset);
169
+ offset += c.length;
170
+ }
171
+ return merged;
172
+ }
173
+
174
+ function decodeArraysToJson(decoder: TextDecoder, data: ArrayLike<number>[]) {
175
+ return JSON.parse(decoder.decode(joinArrays(data)));
176
+ }
@@ -0,0 +1,114 @@
1
+ /** copied from https://github.com/langchain-ai/langgraphjs/tree/main/libs/sdk/src/utils */
2
+ // in this case don't quite match.
3
+ type IterableReadableStreamInterface<T> = ReadableStream<T> & AsyncIterable<T>;
4
+
5
+ /*
6
+ * Support async iterator syntax for ReadableStreams in all environments.
7
+ * Source: https://github.com/MattiasBuelens/web-streams-polyfill/pull/122#issuecomment-1627354490
8
+ */
9
+ export class IterableReadableStream<T> extends ReadableStream<T> implements IterableReadableStreamInterface<T> {
10
+ /** @ts-ignore */
11
+ public reader: ReadableStreamDefaultReader<T>;
12
+
13
+ ensureReader() {
14
+ if (!this.reader) {
15
+ this.reader = this.getReader();
16
+ }
17
+ }
18
+
19
+ async next(): Promise<IteratorResult<T>> {
20
+ this.ensureReader();
21
+ try {
22
+ const result = await this.reader.read();
23
+ if (result.done) {
24
+ this.reader.releaseLock(); // release lock when stream becomes closed
25
+ return {
26
+ done: true,
27
+ value: undefined,
28
+ };
29
+ } else {
30
+ return {
31
+ done: false,
32
+ value: result.value,
33
+ };
34
+ }
35
+ } catch (e) {
36
+ this.reader.releaseLock(); // release lock when stream becomes errored
37
+ throw e;
38
+ }
39
+ }
40
+
41
+ async return(): Promise<IteratorResult<T>> {
42
+ this.ensureReader();
43
+ // If wrapped in a Node stream, cancel is already called.
44
+ if (this.locked) {
45
+ const cancelPromise = this.reader.cancel(); // cancel first, but don't await yet
46
+ this.reader.releaseLock(); // release lock first
47
+ await cancelPromise; // now await it
48
+ }
49
+ return { done: true, value: undefined };
50
+ }
51
+
52
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
53
+ async throw(e: any): Promise<IteratorResult<T>> {
54
+ this.ensureReader();
55
+ if (this.locked) {
56
+ const cancelPromise = this.reader.cancel(); // cancel first, but don't await yet
57
+ this.reader.releaseLock(); // release lock first
58
+ await cancelPromise; // now await it
59
+ }
60
+ throw e;
61
+ }
62
+
63
+ // eslint-disable-next-line @typescript-eslint/ban-ts-comment
64
+ // @ts-ignore Not present in Node 18 types, required in latest Node 22
65
+ async [Symbol.asyncDispose]() {
66
+ await this.return();
67
+ }
68
+
69
+ [Symbol.asyncIterator]() {
70
+ return this;
71
+ }
72
+
73
+ static fromReadableStream<T>(stream: ReadableStream<T>) {
74
+ // From https://developer.mozilla.org/en-US/docs/Web/API/Streams_API/Using_readable_streams#reading_the_stream
75
+ const reader = stream.getReader();
76
+ return new IterableReadableStream<T>({
77
+ start(controller) {
78
+ return pump();
79
+ function pump(): Promise<T | undefined> {
80
+ return reader.read().then(({ done, value }) => {
81
+ // When no more data needs to be consumed, close the stream
82
+ if (done) {
83
+ controller.close();
84
+ return;
85
+ }
86
+ // Enqueue the next data chunk into our target stream
87
+ controller.enqueue(value);
88
+ return pump();
89
+ });
90
+ }
91
+ },
92
+ cancel() {
93
+ reader.releaseLock();
94
+ },
95
+ });
96
+ }
97
+
98
+ static fromAsyncGenerator<T>(generator: AsyncGenerator<T>) {
99
+ return new IterableReadableStream<T>({
100
+ async pull(controller) {
101
+ const { value, done } = await generator.next();
102
+ // When no more data needs to be consumed, close the stream
103
+ if (done) {
104
+ controller.close();
105
+ }
106
+ // Fix: `else if (value)` will hang the streaming when nullish value (e.g. empty string) is pulled
107
+ controller.enqueue(value);
108
+ },
109
+ async cancel(reason) {
110
+ await generator.return(reason);
111
+ },
112
+ });
113
+ }
114
+ }
package/src/index.ts CHANGED
@@ -6,3 +6,5 @@ export * from "./ui-store/index.js";
6
6
  export * from "./ToolManager.js";
7
7
  export * from "./TestKit.js";
8
8
  export * from "./artifacts/index.js";
9
+ export * from "./client/index.js";
10
+ export * from "./History.js";