@langchain/langgraph-sdk 0.0.8 → 0.0.10-rc.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/client.d.ts +1 -1
- package/client.js +1 -1
- package/dist/client.cjs +677 -0
- package/dist/{client.d.mts → client.d.ts} +30 -5
- package/dist/{client.mjs → client.js} +81 -12
- package/dist/index.cjs +5 -0
- package/dist/{index.d.mts → index.d.ts} +1 -1
- package/dist/index.js +1 -0
- package/dist/types.cjs +2 -0
- package/dist/{types.d.mts → types.d.ts} +20 -4
- package/dist/utils/async_caller.cjs +195 -0
- package/dist/utils/{async_caller.d.mts → async_caller.d.ts} +1 -1
- package/dist/utils/eventsource-parser/index.cjs +7 -0
- package/dist/utils/eventsource-parser/index.d.ts +2 -0
- package/dist/utils/eventsource-parser/index.js +3 -0
- package/dist/utils/eventsource-parser/parse.cjs +150 -0
- package/dist/utils/eventsource-parser/parse.d.ts +18 -0
- package/dist/utils/eventsource-parser/parse.js +146 -0
- package/dist/utils/eventsource-parser/stream.cjs +34 -0
- package/dist/utils/eventsource-parser/stream.d.ts +17 -0
- package/dist/utils/eventsource-parser/stream.js +30 -0
- package/dist/utils/eventsource-parser/types.cjs +2 -0
- package/dist/utils/eventsource-parser/types.d.ts +81 -0
- package/dist/utils/eventsource-parser/types.js +1 -0
- package/dist/utils/stream.cjs +115 -0
- package/dist/utils/{stream.d.mts → stream.d.ts} +1 -0
- package/dist/utils/{stream.mjs → stream.js} +5 -0
- package/index.d.ts +1 -1
- package/index.js +1 -1
- package/package.json +1 -2
- package/dist/index.mjs +0 -1
- /package/dist/{types.mjs → types.js} +0 -0
- /package/dist/utils/{async_caller.mjs → async_caller.js} +0 -0
|
@@ -0,0 +1,146 @@
|
|
|
1
|
+
/**
 * Creates a new EventSource parser.
 *
 * The parser is incremental: chunks may split lines, fields, or events at any
 * byte boundary, and partial-line state is carried between `feed()` calls via
 * `startingPosition` / `startingFieldLength`.
 *
 * @param onParse - Callback to invoke when a new event is parsed, or a new reconnection interval
 * has been sent from the server
 *
 * @returns A new EventSource parser, with `feed` and `reset` methods.
 * @public
 */
export function createParser(onParse) {
    // Processing state
    let isFirstChunk;           // true until the first chunk has been fed (used for BOM stripping)
    let buffer;                 // unprocessed tail of the stream, carried across feed() calls
    let startingPosition;       // scan offset into `buffer` for a partially-received line
    let startingFieldLength;    // field-name length found so far on a partial line (-1 = none yet)
    // Event state (accumulated across lines until a blank line dispatches the event)
    let eventId;
    let eventName;
    let data;
    reset();
    return { feed, reset };
    // Restores the parser to its initial state; required between reconnects /
    // when switching data sources.
    function reset() {
        isFirstChunk = true;
        buffer = "";
        startingPosition = 0;
        startingFieldLength = -1;
        eventId = undefined;
        eventName = undefined;
        data = "";
    }
    // Ingests one chunk of raw text and dispatches any lines/events it completes.
    function feed(chunk) {
        buffer = buffer ? buffer + chunk : chunk;
        // Strip any UTF8 byte order mark (BOM) at the start of the stream.
        // Note that we do not strip any non-UTF8 BOM, as eventsource streams are
        // always decoded as UTF8 as per the specification.
        if (isFirstChunk && hasBom(buffer)) {
            buffer = buffer.slice(BOM.length);
        }
        isFirstChunk = false;
        // Set up chunk-specific processing state
        const length = buffer.length;
        let position = 0;
        let discardTrailingNewline = false;
        // Read the current buffer byte by byte
        while (position < length) {
            // EventSource allows for carriage return + line feed, which means we
            // need to ignore a linefeed character if the previous character was a
            // carriage return
            // @todo refactor to reduce nesting, consider checking previous byte?
            // @todo but consider multiple chunks etc
            if (discardTrailingNewline) {
                if (buffer[position] === "\n") {
                    ++position;
                }
                discardTrailingNewline = false;
            }
            // Scan forward for the end of the current line, noting where the
            // field name ends (first ":"). lineLength stays -1 if the line is
            // still incomplete in this buffer.
            let lineLength = -1;
            let fieldLength = startingFieldLength;
            let character;
            for (let index = startingPosition; lineLength < 0 && index < length; ++index) {
                character = buffer[index];
                if (character === ":" && fieldLength < 0) {
                    fieldLength = index - position;
                }
                else if (character === "\r") {
                    discardTrailingNewline = true;
                    lineLength = index - position;
                }
                else if (character === "\n") {
                    lineLength = index - position;
                }
            }
            if (lineLength < 0) {
                // Incomplete line: remember how far we scanned and any field
                // length already found, so the next feed() resumes mid-line.
                startingPosition = length - position;
                startingFieldLength = fieldLength;
                break;
            }
            else {
                startingPosition = 0;
                startingFieldLength = -1;
            }
            parseEventStreamLine(buffer, position, fieldLength, lineLength);
            position += lineLength + 1;
        }
        if (position === length) {
            // If we consumed the entire buffer to read the event, reset the buffer
            buffer = "";
        }
        else if (position > 0) {
            // If there are bytes left to process, set the buffer to the unprocessed
            // portion of the buffer only
            buffer = buffer.slice(position);
        }
    }
    // Interprets one complete line of the stream: blank line dispatches the
    // accumulated event; otherwise the line's field ("data"/"event"/"id"/"retry")
    // updates the pending event state.
    function parseEventStreamLine(lineBuffer, index, fieldLength, lineLength) {
        if (lineLength === 0) {
            // We reached the last line of this event
            if (data.length > 0) {
                onParse({
                    type: "event",
                    id: eventId,
                    event: eventName || undefined,
                    data: data.slice(0, -1), // remove trailing newline
                });
                data = "";
                eventId = undefined;
            }
            eventName = undefined;
            return;
        }
        // A line without ":" is all field name with no value.
        const noValue = fieldLength < 0;
        const field = lineBuffer.slice(index, index + (noValue ? lineLength : fieldLength));
        // `step` skips the field name, the ":", and one optional leading space.
        let step = 0;
        if (noValue) {
            step = lineLength;
        }
        else if (lineBuffer[index + fieldLength + 1] === " ") {
            step = fieldLength + 2;
        }
        else {
            step = fieldLength + 1;
        }
        const position = index + step;
        const valueLength = lineLength - step;
        const value = lineBuffer.slice(position, position + valueLength).toString();
        if (field === "data") {
            // Data lines accumulate with newline separators; the trailing one is
            // trimmed at dispatch time above.
            data += value ? `${value}\n` : "\n";
        }
        else if (field === "event") {
            eventName = value;
        }
        else if (field === "id" && !value.includes("\u0000")) {
            // Per spec, ids containing NUL are ignored.
            eventId = value;
        }
        else if (field === "retry") {
            const retry = parseInt(value, 10);
            if (!Number.isNaN(retry)) {
                onParse({ type: "reconnect-interval", value: retry });
            }
        }
    }
}
// UTF-8 byte order mark as char codes.
const BOM = [239, 187, 191];
function hasBom(buffer) {
    return BOM.every((charCode, index) => buffer.charCodeAt(index) === charCode);
}
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.EventSourceParserStream = void 0;
const parseCjs = require("./parse.cjs");
/**
 * A TransformStream that ingests a stream of strings and produces a stream of ParsedEvents.
 *
 * Only messages with `type: "event"` are forwarded downstream; reconnect-interval
 * messages from the parser are dropped.
 *
 * @example
 * ```
 * const eventStream =
 *   response.body
 *     .pipeThrough(new TextDecoderStream())
 *     .pipeThrough(new EventSourceParserStream())
 * ```
 * @public
 */
class EventSourceParserStream extends TransformStream {
    constructor() {
        let sseParser;
        super({
            start(controller) {
                // Parsed events surface through this callback and are pushed
                // straight into the readable side of the transform.
                sseParser = (0, parseCjs.createParser)((parsed) => {
                    if (parsed.type !== "event")
                        return;
                    controller.enqueue(parsed);
                });
            },
            transform(chunk) {
                sseParser.feed(chunk);
            },
        });
    }
}
exports.EventSourceParserStream = EventSourceParserStream;
|
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
import type { ParsedEvent } from "./types.js";
/**
 * A TransformStream that ingests a stream of strings and produces a stream of ParsedEvents.
 *
 * The readable side emits only `type: "event"` messages; reconnect-interval
 * messages produced by the underlying parser are filtered out.
 *
 * @example
 * ```
 * const eventStream =
 *   response.body
 *     .pipeThrough(new TextDecoderStream())
 *     .pipeThrough(new EventSourceParserStream())
 * ```
 * @public
 */
export declare class EventSourceParserStream extends TransformStream<string, ParsedEvent> {
    constructor();
}
export type { ParsedEvent } from "./types.js";
|
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
import { createParser } from "./parse.js";
/**
 * A TransformStream that ingests a stream of strings and produces a stream of ParsedEvents.
 *
 * Only messages with `type: "event"` are forwarded downstream; reconnect-interval
 * messages from the parser are dropped.
 *
 * @example
 * ```
 * const eventStream =
 *   response.body
 *     .pipeThrough(new TextDecoderStream())
 *     .pipeThrough(new EventSourceParserStream())
 * ```
 * @public
 */
export class EventSourceParserStream extends TransformStream {
    constructor() {
        let sseParser;
        super({
            start(controller) {
                // Parsed events surface through this callback and are pushed
                // straight into the readable side of the transform.
                sseParser = createParser((parsed) => {
                    if (parsed.type !== "event")
                        return;
                    controller.enqueue(parsed);
                });
            },
            transform(chunk) {
                sseParser.feed(chunk);
            },
        });
    }
}
|
|
@@ -0,0 +1,81 @@
|
|
|
1
|
+
/**
 * EventSource parser instance.
 *
 * Needs to be reset between reconnections/when switching data source, using the `reset()` method.
 *
 * @public
 */
export interface EventSourceParser {
    /**
     * Feeds the parser another chunk. The method _does not_ return a parsed message.
     * Instead, if the chunk was a complete message (or completed a previously incomplete message),
     * it will invoke the `onParse` callback used to create the parser.
     *
     * @param chunk - The chunk to parse. Can be a partial, eg in the case of streaming messages.
     * @public
     */
    feed(chunk: string): void;
    /**
     * Resets the parser state. This is required when you have a new stream of messages -
     * for instance in the case of a client being disconnected and reconnecting.
     *
     * @public
     */
    reset(): void;
}
/**
 * A parsed EventSource event
 *
 * @public
 */
export interface ParsedEvent {
    /**
     * Differentiates the type from reconnection intervals and other types of messages
     * Not to be confused with `event`.
     */
    type: "event";
    /**
     * The event type sent from the server. Note that this differs from the browser `EventSource`
     * implementation in that browsers will default this to `message`, whereas this parser will
     * leave this as `undefined` if not explicitly declared.
     */
    event?: string;
    /**
     * ID of the message, if any was provided by the server. Can be used by clients to keep the
     * last received message ID in sync when reconnecting.
     */
    id?: string;
    /**
     * The data received for this message
     */
    data: string;
}
/**
 * An event emitted from the parser when the server sends a value in the `retry` field,
 * indicating how many milliseconds the client should wait before attempting to reconnect
 * (the SSE specification defines the reconnection time in milliseconds).
 *
 * @public
 */
export interface ReconnectInterval {
    /**
     * Differentiates the type from `event` and other types of messages
     */
    type: "reconnect-interval";
    /**
     * Number of milliseconds to wait before reconnecting. Note that the parser does not care
     * about this value at all - it only emits the value for clients to use.
     */
    value: number;
}
/**
 * The different types of messages the parser can emit to the `onParse` callback
 *
 * @public
 */
export type ParseEvent = ParsedEvent | ReconnectInterval;
/**
 * Callback passed as the `onParse` callback to a parser
 *
 * @public
 */
export type EventSourceParseCallback = (event: ParseEvent) => void;
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
// Intentionally empty at runtime: the type definitions live in the companion
// .d.ts file, and this bare export marks the file as an ES module.
export {};
|
|
@@ -0,0 +1,115 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.IterableReadableStream = void 0;
|
|
4
|
+
/*
|
|
5
|
+
* Support async iterator syntax for ReadableStreams in all environments.
|
|
6
|
+
* Source: https://github.com/MattiasBuelens/web-streams-polyfill/pull/122#issuecomment-1627354490
|
|
7
|
+
*/
|
|
8
|
+
class IterableReadableStream extends ReadableStream {
|
|
9
|
+
constructor() {
|
|
10
|
+
super(...arguments);
|
|
11
|
+
Object.defineProperty(this, "reader", {
|
|
12
|
+
enumerable: true,
|
|
13
|
+
configurable: true,
|
|
14
|
+
writable: true,
|
|
15
|
+
value: void 0
|
|
16
|
+
});
|
|
17
|
+
}
|
|
18
|
+
ensureReader() {
|
|
19
|
+
if (!this.reader) {
|
|
20
|
+
this.reader = this.getReader();
|
|
21
|
+
}
|
|
22
|
+
}
|
|
23
|
+
async next() {
|
|
24
|
+
this.ensureReader();
|
|
25
|
+
try {
|
|
26
|
+
const result = await this.reader.read();
|
|
27
|
+
if (result.done) {
|
|
28
|
+
this.reader.releaseLock(); // release lock when stream becomes closed
|
|
29
|
+
return {
|
|
30
|
+
done: true,
|
|
31
|
+
value: undefined,
|
|
32
|
+
};
|
|
33
|
+
}
|
|
34
|
+
else {
|
|
35
|
+
return {
|
|
36
|
+
done: false,
|
|
37
|
+
value: result.value,
|
|
38
|
+
};
|
|
39
|
+
}
|
|
40
|
+
}
|
|
41
|
+
catch (e) {
|
|
42
|
+
this.reader.releaseLock(); // release lock when stream becomes errored
|
|
43
|
+
throw e;
|
|
44
|
+
}
|
|
45
|
+
}
|
|
46
|
+
async return() {
|
|
47
|
+
this.ensureReader();
|
|
48
|
+
// If wrapped in a Node stream, cancel is already called.
|
|
49
|
+
if (this.locked) {
|
|
50
|
+
const cancelPromise = this.reader.cancel(); // cancel first, but don't await yet
|
|
51
|
+
this.reader.releaseLock(); // release lock first
|
|
52
|
+
await cancelPromise; // now await it
|
|
53
|
+
}
|
|
54
|
+
return { done: true, value: undefined };
|
|
55
|
+
}
|
|
56
|
+
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
|
57
|
+
async throw(e) {
|
|
58
|
+
this.ensureReader();
|
|
59
|
+
if (this.locked) {
|
|
60
|
+
const cancelPromise = this.reader.cancel(); // cancel first, but don't await yet
|
|
61
|
+
this.reader.releaseLock(); // release lock first
|
|
62
|
+
await cancelPromise; // now await it
|
|
63
|
+
}
|
|
64
|
+
throw e;
|
|
65
|
+
}
|
|
66
|
+
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
|
67
|
+
// @ts-ignore Not present in Node 18 types, required in latest Node 22
|
|
68
|
+
async [Symbol.asyncDispose]() {
|
|
69
|
+
await this.return();
|
|
70
|
+
}
|
|
71
|
+
[Symbol.asyncIterator]() {
|
|
72
|
+
return this;
|
|
73
|
+
}
|
|
74
|
+
static fromReadableStream(stream) {
|
|
75
|
+
// From https://developer.mozilla.org/en-US/docs/Web/API/Streams_API/Using_readable_streams#reading_the_stream
|
|
76
|
+
const reader = stream.getReader();
|
|
77
|
+
return new IterableReadableStream({
|
|
78
|
+
start(controller) {
|
|
79
|
+
return pump();
|
|
80
|
+
function pump() {
|
|
81
|
+
return reader.read().then(({ done, value }) => {
|
|
82
|
+
// When no more data needs to be consumed, close the stream
|
|
83
|
+
if (done) {
|
|
84
|
+
controller.close();
|
|
85
|
+
return;
|
|
86
|
+
}
|
|
87
|
+
// Enqueue the next data chunk into our target stream
|
|
88
|
+
controller.enqueue(value);
|
|
89
|
+
return pump();
|
|
90
|
+
});
|
|
91
|
+
}
|
|
92
|
+
},
|
|
93
|
+
cancel() {
|
|
94
|
+
reader.releaseLock();
|
|
95
|
+
},
|
|
96
|
+
});
|
|
97
|
+
}
|
|
98
|
+
static fromAsyncGenerator(generator) {
|
|
99
|
+
return new IterableReadableStream({
|
|
100
|
+
async pull(controller) {
|
|
101
|
+
const { value, done } = await generator.next();
|
|
102
|
+
// When no more data needs to be consumed, close the stream
|
|
103
|
+
if (done) {
|
|
104
|
+
controller.close();
|
|
105
|
+
}
|
|
106
|
+
// Fix: `else if (value)` will hang the streaming when nullish value (e.g. empty string) is pulled
|
|
107
|
+
controller.enqueue(value);
|
|
108
|
+
},
|
|
109
|
+
async cancel(reason) {
|
|
110
|
+
await generator.return(reason);
|
|
111
|
+
},
|
|
112
|
+
});
|
|
113
|
+
}
|
|
114
|
+
}
|
|
115
|
+
exports.IterableReadableStream = IterableReadableStream;
|
|
@@ -5,6 +5,7 @@ export declare class IterableReadableStream<T> extends ReadableStream<T> impleme
|
|
|
5
5
|
next(): Promise<IteratorResult<T>>;
|
|
6
6
|
return(): Promise<IteratorResult<T>>;
|
|
7
7
|
throw(e: any): Promise<IteratorResult<T>>;
|
|
8
|
+
[Symbol.asyncDispose](): Promise<void>;
|
|
8
9
|
[Symbol.asyncIterator](): this;
|
|
9
10
|
static fromReadableStream<T>(stream: ReadableStream<T>): IterableReadableStream<T>;
|
|
10
11
|
static fromAsyncGenerator<T>(generator: AsyncGenerator<T>): IterableReadableStream<T>;
|
|
@@ -60,6 +60,11 @@ export class IterableReadableStream extends ReadableStream {
|
|
|
60
60
|
}
|
|
61
61
|
throw e;
|
|
62
62
|
}
|
|
63
|
+
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
|
64
|
+
// @ts-ignore Not present in Node 18 types, required in latest Node 22
|
|
65
|
+
async [Symbol.asyncDispose]() {
|
|
66
|
+
await this.return();
|
|
67
|
+
}
|
|
63
68
|
[Symbol.asyncIterator]() {
|
|
64
69
|
return this;
|
|
65
70
|
}
|
package/index.d.ts
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
export * from './dist/index.
|
|
1
|
+
export * from './dist/index.js'
|
package/index.js
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
export * from './dist/index.
|
|
1
|
+
export * from './dist/index.js'
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@langchain/langgraph-sdk",
|
|
3
|
-
"version": "0.0.
|
|
3
|
+
"version": "0.0.10-rc.0",
|
|
4
4
|
"description": "Client library for interacting with the LangGraph API",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"packageManager": "yarn@1.22.19",
|
|
@@ -17,7 +17,6 @@
|
|
|
17
17
|
"license": "MIT",
|
|
18
18
|
"dependencies": {
|
|
19
19
|
"@types/json-schema": "^7.0.15",
|
|
20
|
-
"eventsource-parser": "^1.1.2",
|
|
21
20
|
"p-queue": "^6.6.2",
|
|
22
21
|
"p-retry": "4",
|
|
23
22
|
"uuid": "^9.0.0"
|
package/dist/index.mjs
DELETED
|
@@ -1 +0,0 @@
|
|
|
1
|
-
export { Client } from "./client.mjs";
|
|
File without changes
|
|
File without changes
|