@langchain/langgraph-sdk 0.0.38 → 0.0.40
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/client.cjs +10 -67
- package/dist/client.d.ts +18 -23
- package/dist/client.js +10 -67
- package/dist/index.d.ts +3 -0
- package/dist/react/debug.cjs +32 -0
- package/dist/react/debug.d.ts +23 -0
- package/dist/react/debug.js +28 -0
- package/dist/react/index.cjs +5 -0
- package/dist/react/index.d.ts +1 -0
- package/dist/react/index.js +1 -0
- package/dist/react/stream.cjs +411 -0
- package/dist/react/stream.d.ts +46 -0
- package/dist/react/stream.js +407 -0
- package/dist/types.d.ts +6 -16
- package/dist/types.messages.d.ts +88 -0
- package/dist/types.stream.cjs +2 -0
- package/dist/types.stream.d.ts +156 -0
- package/dist/types.stream.js +1 -0
- package/dist/utils/async_caller.cjs +2 -1
- package/dist/utils/async_caller.js +2 -1
- package/dist/utils/sse.cjs +157 -0
- package/dist/utils/sse.d.ts +11 -0
- package/dist/utils/sse.js +152 -0
- package/package.json +23 -3
- package/react.cjs +1 -0
- package/react.d.cts +1 -0
- package/react.d.ts +1 -0
- package/react.js +1 -0
- package/dist/utils/eventsource-parser/index.cjs +0 -7
- package/dist/utils/eventsource-parser/index.d.ts +0 -2
- package/dist/utils/eventsource-parser/index.js +0 -3
- package/dist/utils/eventsource-parser/parse.cjs +0 -150
- package/dist/utils/eventsource-parser/parse.d.ts +0 -18
- package/dist/utils/eventsource-parser/parse.js +0 -146
- package/dist/utils/eventsource-parser/stream.cjs +0 -34
- package/dist/utils/eventsource-parser/stream.d.ts +0 -17
- package/dist/utils/eventsource-parser/stream.js +0 -30
- package/dist/utils/eventsource-parser/types.d.ts +0 -81
- /package/dist/{utils/eventsource-parser/types.cjs → types.messages.cjs} +0 -0
- /package/dist/{utils/eventsource-parser/types.js → types.messages.js} +0 -0

package/dist/types.stream.d.ts
ADDED
@@ -0,0 +1,156 @@
+import type { Message } from "./types.messages.js";
+/**
+ * Stream modes
+ * - "values": Stream only the state values.
+ * - "messages": Stream complete messages.
+ * - "messages-tuple": Stream (message chunk, metadata) tuples.
+ * - "updates": Stream updates to the state.
+ * - "events": Stream events occurring during execution.
+ * - "debug": Stream detailed debug information.
+ * - "custom": Stream custom events.
+ */
+export type StreamMode = "values" | "messages" | "updates" | "events" | "debug" | "custom" | "messages-tuple";
+type MessageTupleMetadata = {
+    tags: string[];
+    [key: string]: unknown;
+};
+type AsSubgraph<TEvent extends {
+    event: string;
+    data: unknown;
+}> = {
+    event: TEvent["event"] | `${TEvent["event"]}|${string}`;
+    data: TEvent["data"];
+};
+/**
+ * Stream event with values after completion of each step.
+ */
+export type ValuesStreamEvent<StateType> = {
+    event: "values";
+    data: StateType;
+};
+/** @internal */
+export type SubgraphValuesStreamEvent<StateType> = AsSubgraph<ValuesStreamEvent<StateType>>;
+/**
+ * Stream event with message chunks coming from LLM invocations inside nodes.
+ */
+export type MessagesTupleStreamEvent = {
+    event: "messages";
+    data: [message: Message, config: MessageTupleMetadata];
+};
+/** @internal */
+export type SubgraphMessagesTupleStreamEvent = AsSubgraph<MessagesTupleStreamEvent>;
+/**
+ * Metadata stream event with information about the run and thread
+ */
+export type MetadataStreamEvent = {
+    event: "metadata";
+    data: {
+        run_id: string;
+        thread_id: string;
+    };
+};
+/**
+ * Stream event with error information.
+ */
+export type ErrorStreamEvent = {
+    event: "error";
+    data: {
+        error: string;
+        message: string;
+    };
+};
+/** @internal */
+export type SubgraphErrorStreamEvent = AsSubgraph<ErrorStreamEvent>;
+/**
+ * Stream event with updates to the state after each step.
+ * The streamed outputs include the name of the node that
+ * produced the update as well as the update.
+ */
+export type UpdatesStreamEvent<UpdateType> = {
+    event: "updates";
+    data: {
+        [node: string]: UpdateType;
+    };
+};
+/** @internal */
+export type SubgraphUpdatesStreamEvent<UpdateType> = AsSubgraph<UpdatesStreamEvent<UpdateType>>;
+/**
+ * Streaming custom data from inside the nodes.
+ */
+export type CustomStreamEvent<T> = {
+    event: "custom";
+    data: T;
+};
+/** @internal */
+export type SubgraphCustomStreamEvent<T> = AsSubgraph<CustomStreamEvent<T>>;
+type MessagesMetadataStreamEvent = {
+    event: "messages/metadata";
+    data: {
+        [messageId: string]: {
+            metadata: unknown;
+        };
+    };
+};
+type MessagesCompleteStreamEvent = {
+    event: "messages/complete";
+    data: Message[];
+};
+type MessagesPartialStreamEvent = {
+    event: "messages/partial";
+    data: Message[];
+};
+/**
+ * Message stream event specific to LangGraph Server.
+ * @deprecated Use `streamMode: "messages-tuple"` instead.
+ */
+export type MessagesStreamEvent = MessagesMetadataStreamEvent | MessagesCompleteStreamEvent | MessagesPartialStreamEvent;
+/** @internal */
+export type SubgraphMessagesStreamEvent = AsSubgraph<MessagesMetadataStreamEvent> | AsSubgraph<MessagesCompleteStreamEvent> | AsSubgraph<MessagesPartialStreamEvent>;
+/**
+ * Stream event with detailed debug information.
+ */
+export type DebugStreamEvent = {
+    event: "debug";
+    data: unknown;
+};
+/** @internal */
+export type SubgraphDebugStreamEvent = AsSubgraph<DebugStreamEvent>;
+/**
+ * Stream event with events occurring during execution.
+ */
+export type EventsStreamEvent = {
+    event: "events";
+    data: unknown;
+};
+/** @internal */
+export type SubgraphEventsStreamEvent = AsSubgraph<EventsStreamEvent>;
+/**
+ * Stream event with a feedback key to signed URL map. Set `feedbackKeys` in
+ * the `RunsStreamPayload` to receive this event.
+ */
+export type FeedbackStreamEvent = {
+    event: "feedback";
+    data: {
+        [feedbackKey: string]: string;
+    };
+};
+type GetStreamModeMap<TStreamMode extends StreamMode | StreamMode[], TStateType = unknown, TUpdateType = TStateType, TCustomType = unknown> = {
+    values: ValuesStreamEvent<TStateType>;
+    updates: UpdatesStreamEvent<TUpdateType>;
+    custom: CustomStreamEvent<TCustomType>;
+    debug: DebugStreamEvent;
+    messages: MessagesStreamEvent;
+    "messages-tuple": MessagesTupleStreamEvent;
+    events: EventsStreamEvent;
+}[TStreamMode extends StreamMode[] ? TStreamMode[number] : TStreamMode] | ErrorStreamEvent | MetadataStreamEvent | FeedbackStreamEvent;
+type GetSubgraphsStreamModeMap<TStreamMode extends StreamMode | StreamMode[], TStateType = unknown, TUpdateType = TStateType, TCustomType = unknown> = {
+    values: SubgraphValuesStreamEvent<TStateType>;
+    updates: SubgraphUpdatesStreamEvent<TUpdateType>;
+    custom: SubgraphCustomStreamEvent<TCustomType>;
+    debug: SubgraphDebugStreamEvent;
+    messages: SubgraphMessagesStreamEvent;
+    "messages-tuple": SubgraphMessagesTupleStreamEvent;
+    events: SubgraphEventsStreamEvent;
+}[TStreamMode extends StreamMode[] ? TStreamMode[number] : TStreamMode] | SubgraphErrorStreamEvent | MetadataStreamEvent | FeedbackStreamEvent;
+export type TypedAsyncGenerator<TStreamMode extends StreamMode | StreamMode[] = [], TSubgraphs extends boolean = false, TStateType = unknown, TUpdateType = TStateType, TCustomType = unknown> = AsyncGenerator<TSubgraphs extends true ? GetSubgraphsStreamModeMap<TStreamMode, TStateType, TUpdateType, TCustomType> : GetStreamModeMap<TStreamMode, TStateType, TUpdateType, TCustomType>>;
+export {};
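
These event unions describe the `{ event, data }` chunks yielded by the client's run stream, and `TypedAsyncGenerator` ties the yielded union to the requested stream modes. A minimal consumer sketch, assuming the `Client` from this package and a locally running LangGraph server; the URL, thread id, assistant id, and input shape are placeholders, and the casts stand in for the narrowing the new types are meant to provide:

import { Client } from "@langchain/langgraph-sdk";

const client = new Client({ apiUrl: "http://localhost:8123" }); // placeholder URL

async function streamRun(threadId: string, assistantId: string) {
  const stream = client.runs.stream(threadId, assistantId, {
    input: { messages: [{ type: "human", content: "hi" }] }, // placeholder input
    streamMode: ["values", "messages-tuple"],
  });

  for await (const chunk of stream) {
    if (chunk.event === "metadata") {
      // MetadataStreamEvent: ids of the run and thread
      console.log("run", (chunk.data as { run_id: string }).run_id);
    } else if (chunk.event === "values") {
      // ValuesStreamEvent: full state after each step
      console.log("state", chunk.data);
    } else if (chunk.event === "messages") {
      // MessagesTupleStreamEvent: [message chunk, metadata with tags]
      const [message, metadata] = chunk.data as [unknown, { tags: string[] }];
      console.log(metadata.tags, message);
    } else if (chunk.event === "error") {
      // ErrorStreamEvent
      console.error((chunk.data as { message: string }).message);
    }
  }
}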

package/dist/types.stream.js
ADDED
@@ -0,0 +1 @@
+export {};

package/dist/utils/async_caller.cjs
CHANGED
@@ -6,6 +6,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.AsyncCaller = void 0;
 const p_retry_1 = __importDefault(require("p-retry"));
 const p_queue_1 = __importDefault(require("p-queue"));
+const fetch_js_1 = require("../singletons/fetch.cjs");
 const STATUS_NO_RETRY = [
     400, // Bad Request
     401, // Unauthorized
@@ -189,7 +190,7 @@ class AsyncCaller {
         return this.call(callable, ...args);
     }
     fetch(...args) {
-        const fetchFn = this.customFetch ??
+        const fetchFn = this.customFetch ?? (0, fetch_js_1._getFetchImplementation)();
         return this.call(() => fetchFn(...args).then((res) => (res.ok ? res : Promise.reject(res))));
     }
 }

package/dist/utils/async_caller.js
CHANGED
@@ -1,5 +1,6 @@
 import pRetry from "p-retry";
 import PQueueMod from "p-queue";
+import { _getFetchImplementation } from "../singletons/fetch.js";
 const STATUS_NO_RETRY = [
     400, // Bad Request
     401, // Unauthorized
@@ -183,7 +184,7 @@ export class AsyncCaller {
         return this.call(callable, ...args);
     }
     fetch(...args) {
-        const fetchFn = this.customFetch ??
+        const fetchFn = this.customFetch ?? _getFetchImplementation();
         return this.call(() => fetchFn(...args).then((res) => (res.ok ? res : Promise.reject(res))));
     }
 }
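
In both builds the change is the same: instead of closing over the global `fetch` at import time, `AsyncCaller.fetch` now asks a singleton in `dist/singletons/fetch` for the current implementation on every call, with the per-instance `customFetch` still taking precedence. The singleton itself is not part of this diff; a plausible sketch of such a module, where every name other than `_getFetchImplementation` is assumed for illustration:

// Hypothetical sketch of a fetch singleton; the real dist/singletons/fetch
// module is not shown in this diff, so the setter name below is an assumption.
let overriddenFetch: typeof fetch | undefined;

export function overrideFetchImplementation(impl: typeof fetch): void {
  // Remember a caller-supplied fetch (e.g. an instrumented or proxied one).
  overriddenFetch = impl;
}

export function _getFetchImplementation(): typeof fetch {
  // Fall back to the runtime's global fetch when nothing was overridden.
  return overriddenFetch ?? fetch;
}

Resolving the implementation at call time means an override applied after the SDK module has loaded still takes effect for subsequent requests.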

package/dist/utils/sse.cjs
ADDED
@@ -0,0 +1,157 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.SSEDecoder = exports.BytesLineDecoder = void 0;
+const CR = "\r".charCodeAt(0);
+const LF = "\n".charCodeAt(0);
+const NULL = "\0".charCodeAt(0);
+const COLON = ":".charCodeAt(0);
+const SPACE = " ".charCodeAt(0);
+const TRAILING_NEWLINE = [CR, LF];
+class BytesLineDecoder extends TransformStream {
+    constructor() {
+        let buffer = [];
+        let trailingCr = false;
+        super({
+            start() {
+                buffer = [];
+                trailingCr = false;
+            },
+            transform(chunk, controller) {
+                // See https://docs.python.org/3/glossary.html#term-universal-newlines
+                let text = chunk;
+                // Handle trailing CR from previous chunk
+                if (trailingCr) {
+                    text = joinArrays([[CR], text]);
+                    trailingCr = false;
+                }
+                // Check for trailing CR in current chunk
+                if (text.length > 0 && text.at(-1) === CR) {
+                    trailingCr = true;
+                    text = text.subarray(0, -1);
+                }
+                if (!text.length)
+                    return;
+                const trailingNewline = TRAILING_NEWLINE.includes(text.at(-1));
+                const lastIdx = text.length - 1;
+                const { lines } = text.reduce((acc, cur, idx) => {
+                    if (acc.from > idx)
+                        return acc;
+                    if (cur === CR || cur === LF) {
+                        acc.lines.push(text.subarray(acc.from, idx));
+                        if (cur === CR && text[idx + 1] === LF) {
+                            acc.from = idx + 2;
+                        }
+                        else {
+                            acc.from = idx + 1;
+                        }
+                    }
+                    if (idx === lastIdx && acc.from <= lastIdx) {
+                        acc.lines.push(text.subarray(acc.from));
+                    }
+                    return acc;
+                }, { lines: [], from: 0 });
+                if (lines.length === 1 && !trailingNewline) {
+                    buffer.push(lines[0]);
+                    return;
+                }
+                if (buffer.length) {
+                    // Include existing buffer in first line
+                    buffer.push(lines[0]);
+                    lines[0] = joinArrays(buffer);
+                    buffer = [];
+                }
+                if (!trailingNewline) {
+                    // If the last segment is not newline terminated,
+                    // buffer it for the next chunk
+                    if (lines.length)
+                        buffer = [lines.pop()];
+                }
+                // Enqueue complete lines
+                for (const line of lines) {
+                    controller.enqueue(line);
+                }
+            },
+            flush(controller) {
+                if (buffer.length) {
+                    controller.enqueue(joinArrays(buffer));
+                }
+            },
+        });
+    }
+}
+exports.BytesLineDecoder = BytesLineDecoder;
+class SSEDecoder extends TransformStream {
+    constructor() {
+        let event = "";
+        let data = [];
+        let lastEventId = "";
+        let retry = null;
+        const decoder = new TextDecoder();
+        super({
+            transform(chunk, controller) {
+                // Handle empty line case
+                if (!chunk.length) {
+                    if (!event && !data.length && !lastEventId && retry == null)
+                        return;
+                    const sse = {
+                        event,
+                        data: data.length ? decodeArraysToJson(decoder, data) : null,
+                    };
+                    // NOTE: as per the SSE spec, do not reset lastEventId
+                    event = "";
+                    data = [];
+                    retry = null;
+                    controller.enqueue(sse);
+                    return;
+                }
+                // Ignore comments
+                if (chunk[0] === COLON)
+                    return;
+                const sepIdx = chunk.indexOf(COLON);
+                if (sepIdx === -1)
+                    return;
+                const fieldName = decoder.decode(chunk.subarray(0, sepIdx));
+                let value = chunk.subarray(sepIdx + 1);
+                if (value[0] === SPACE)
+                    value = value.subarray(1);
+                if (fieldName === "event") {
+                    event = decoder.decode(value);
+                }
+                else if (fieldName === "data") {
+                    data.push(value);
+                }
+                else if (fieldName === "id") {
+                    if (value.indexOf(NULL) === -1)
+                        lastEventId = decoder.decode(value);
+                }
+                else if (fieldName === "retry") {
+                    const retryNum = Number.parseInt(decoder.decode(value));
+                    if (!Number.isNaN(retryNum))
+                        retry = retryNum;
+                }
+            },
+            flush(controller) {
+                if (event) {
+                    controller.enqueue({
+                        event,
+                        data: data.length ? decodeArraysToJson(decoder, data) : null,
+                    });
+                }
+            },
+        });
+    }
+}
+exports.SSEDecoder = SSEDecoder;
+function joinArrays(data) {
+    const totalLength = data.reduce((acc, curr) => acc + curr.length, 0);
+    let merged = new Uint8Array(totalLength);
+    let offset = 0;
+    for (const c of data) {
+        merged.set(c, offset);
+        offset += c.length;
+    }
+    return merged;
+}
+function decodeArraysToJson(decoder, data) {
+    return JSON.parse(decoder.decode(joinArrays(data)));
+}

package/dist/utils/sse.d.ts
ADDED
@@ -0,0 +1,11 @@
+export declare class BytesLineDecoder extends TransformStream<Uint8Array, Uint8Array> {
+    constructor();
+}
+interface StreamPart {
+    event: string;
+    data: unknown;
+}
+export declare class SSEDecoder extends TransformStream<Uint8Array, StreamPart> {
+    constructor();
+}
+export {};
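
Because both decoders are plain `TransformStream`s, they compose with the Web Streams API: bytes in, lines out, then parsed `{ event, data }` parts out. They are internal modules (not exposed through the package's exports map), but conceptually the wiring looks like the sketch below, where the endpoint and request body are placeholders and the `for await` loop assumes a runtime whose `ReadableStream` is async-iterable (Node 18+):

// Illustrative only: these classes live in the package's internal
// dist/utils/sse module and are not a public subpath export.
import { BytesLineDecoder, SSEDecoder } from "./utils/sse.js";

async function readSse(url: string, body: unknown) {
  const response = await fetch(url, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify(body),
  });
  if (!response.body) throw new Error("Response has no body");

  const parts = response.body
    .pipeThrough(new BytesLineDecoder()) // split raw bytes into SSE lines
    .pipeThrough(new SSEDecoder());      // group lines into { event, data }

  for await (const part of parts) {
    console.log(part.event, part.data); // data arrives already JSON-parsed
  }
}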

package/dist/utils/sse.js
ADDED
@@ -0,0 +1,152 @@
+const CR = "\r".charCodeAt(0);
+const LF = "\n".charCodeAt(0);
+const NULL = "\0".charCodeAt(0);
+const COLON = ":".charCodeAt(0);
+const SPACE = " ".charCodeAt(0);
+const TRAILING_NEWLINE = [CR, LF];
+export class BytesLineDecoder extends TransformStream {
+    constructor() {
+        let buffer = [];
+        let trailingCr = false;
+        super({
+            start() {
+                buffer = [];
+                trailingCr = false;
+            },
+            transform(chunk, controller) {
+                // See https://docs.python.org/3/glossary.html#term-universal-newlines
+                let text = chunk;
+                // Handle trailing CR from previous chunk
+                if (trailingCr) {
+                    text = joinArrays([[CR], text]);
+                    trailingCr = false;
+                }
+                // Check for trailing CR in current chunk
+                if (text.length > 0 && text.at(-1) === CR) {
+                    trailingCr = true;
+                    text = text.subarray(0, -1);
+                }
+                if (!text.length)
+                    return;
+                const trailingNewline = TRAILING_NEWLINE.includes(text.at(-1));
+                const lastIdx = text.length - 1;
+                const { lines } = text.reduce((acc, cur, idx) => {
+                    if (acc.from > idx)
+                        return acc;
+                    if (cur === CR || cur === LF) {
+                        acc.lines.push(text.subarray(acc.from, idx));
+                        if (cur === CR && text[idx + 1] === LF) {
+                            acc.from = idx + 2;
+                        }
+                        else {
+                            acc.from = idx + 1;
+                        }
+                    }
+                    if (idx === lastIdx && acc.from <= lastIdx) {
+                        acc.lines.push(text.subarray(acc.from));
+                    }
+                    return acc;
+                }, { lines: [], from: 0 });
+                if (lines.length === 1 && !trailingNewline) {
+                    buffer.push(lines[0]);
+                    return;
+                }
+                if (buffer.length) {
+                    // Include existing buffer in first line
+                    buffer.push(lines[0]);
+                    lines[0] = joinArrays(buffer);
+                    buffer = [];
+                }
+                if (!trailingNewline) {
+                    // If the last segment is not newline terminated,
+                    // buffer it for the next chunk
+                    if (lines.length)
+                        buffer = [lines.pop()];
+                }
+                // Enqueue complete lines
+                for (const line of lines) {
+                    controller.enqueue(line);
+                }
+            },
+            flush(controller) {
+                if (buffer.length) {
+                    controller.enqueue(joinArrays(buffer));
+                }
+            },
+        });
+    }
+}
+export class SSEDecoder extends TransformStream {
+    constructor() {
+        let event = "";
+        let data = [];
+        let lastEventId = "";
+        let retry = null;
+        const decoder = new TextDecoder();
+        super({
+            transform(chunk, controller) {
+                // Handle empty line case
+                if (!chunk.length) {
+                    if (!event && !data.length && !lastEventId && retry == null)
+                        return;
+                    const sse = {
+                        event,
+                        data: data.length ? decodeArraysToJson(decoder, data) : null,
+                    };
+                    // NOTE: as per the SSE spec, do not reset lastEventId
+                    event = "";
+                    data = [];
+                    retry = null;
+                    controller.enqueue(sse);
+                    return;
+                }
+                // Ignore comments
+                if (chunk[0] === COLON)
+                    return;
+                const sepIdx = chunk.indexOf(COLON);
+                if (sepIdx === -1)
+                    return;
+                const fieldName = decoder.decode(chunk.subarray(0, sepIdx));
+                let value = chunk.subarray(sepIdx + 1);
+                if (value[0] === SPACE)
+                    value = value.subarray(1);
+                if (fieldName === "event") {
+                    event = decoder.decode(value);
+                }
+                else if (fieldName === "data") {
+                    data.push(value);
+                }
+                else if (fieldName === "id") {
+                    if (value.indexOf(NULL) === -1)
+                        lastEventId = decoder.decode(value);
+                }
+                else if (fieldName === "retry") {
+                    const retryNum = Number.parseInt(decoder.decode(value));
+                    if (!Number.isNaN(retryNum))
+                        retry = retryNum;
+                }
+            },
+            flush(controller) {
+                if (event) {
+                    controller.enqueue({
+                        event,
+                        data: data.length ? decodeArraysToJson(decoder, data) : null,
+                    });
+                }
+            },
+        });
+    }
+}
+function joinArrays(data) {
+    const totalLength = data.reduce((acc, curr) => acc + curr.length, 0);
+    let merged = new Uint8Array(totalLength);
+    let offset = 0;
+    for (const c of data) {
+        merged.set(c, offset);
+        offset += c.length;
+    }
+    return merged;
+}
+function decodeArraysToJson(decoder, data) {
+    return JSON.parse(decoder.decode(joinArrays(data)));
+}

package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@langchain/langgraph-sdk",
-  "version": "0.0.38",
+  "version": "0.0.40",
   "description": "Client library for interacting with the LangGraph API",
   "type": "module",
   "packageManager": "yarn@1.22.19",
@@ -22,18 +22,25 @@
   },
   "devDependencies": {
     "@jest/globals": "^29.7.0",
+    "@langchain/core": "^0.3.31",
     "@langchain/scripts": "^0.1.4",
     "@tsconfig/recommended": "^1.0.2",
     "@types/jest": "^29.5.12",
     "@types/node": "^20.12.12",
     "@types/uuid": "^9.0.1",
+    "@types/react": "18.3.2",
     "concat-md": "^0.5.1",
     "jest": "^29.7.0",
     "prettier": "^3.2.5",
     "ts-jest": "^29.1.2",
     "typedoc": "^0.26.1",
     "typedoc-plugin-markdown": "^4.1.0",
-    "typescript": "^5.4.5"
+    "typescript": "^5.4.5",
+    "react": "^18.3.1"
+  },
+  "peerDependencies": {
+    "react": "^18 || ^19",
+    "@langchain/core": ">=0.2.31 <0.4.0"
   },
   "exports": {
     ".": {
@@ -54,6 +61,15 @@
       "import": "./client.js",
       "require": "./client.cjs"
     },
+    "./react": {
+      "types": {
+        "import": "./react.d.ts",
+        "require": "./react.d.cts",
+        "default": "./react.d.ts"
+      },
+      "import": "./react.js",
+      "require": "./react.cjs"
+    },
     "./package.json": "./package.json"
   },
   "files": [
@@ -65,6 +81,10 @@
     "client.cjs",
     "client.js",
     "client.d.ts",
-    "client.d.cts"
+    "client.d.cts",
+    "react.cjs",
+    "react.js",
+    "react.d.ts",
+    "react.d.cts"
   ]
 }

package/react.cjs
ADDED
@@ -0,0 +1 @@
+module.exports = require('./dist/react/index.cjs');

package/react.d.cts
ADDED
@@ -0,0 +1 @@
+export * from './dist/react/index.js'

package/react.d.ts
ADDED
@@ -0,0 +1 @@
+export * from './dist/react/index.js'

package/react.js
ADDED
@@ -0,0 +1 @@
+export * from './dist/react/index.js'
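
Together with the new `./react` entry in the exports map, these stubs make the compiled dist/react bindings importable as `@langchain/langgraph-sdk/react`. The exported names are not visible in this diff; a usage sketch assuming the entry point exposes a `useStream` hook with `apiUrl`/`assistantId` options (the hook name, the option names, and the values are all assumptions):

import { useStream } from "@langchain/langgraph-sdk/react"; // assumed export name

// Hypothetical wrapper hook; option names are assumptions based on the
// dist/react/stream module added in this release.
export function useAgentStream() {
  return useStream({
    apiUrl: "http://localhost:8123", // placeholder deployment URL
    assistantId: "agent",            // placeholder assistant/graph id
  });
}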

package/dist/utils/eventsource-parser/index.cjs
DELETED
@@ -1,7 +0,0 @@
-"use strict";
-// From https://github.com/rexxars/eventsource-parser
-// Inlined due to CJS import issues
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.createParser = void 0;
-var parse_js_1 = require("./parse.cjs");
-Object.defineProperty(exports, "createParser", { enumerable: true, get: function () { return parse_js_1.createParser; } });