@langchain/langgraph-sdk 0.0.9 → 0.0.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/client.d.ts +1 -1
- package/client.js +1 -1
- package/dist/client.cjs +677 -0
- package/dist/{client.d.mts → client.d.ts} +2 -2
- package/dist/{client.mjs → client.js} +3 -3
- package/dist/index.cjs +5 -0
- package/dist/{index.d.mts → index.d.ts} +1 -1
- package/dist/index.js +1 -0
- package/dist/types.cjs +2 -0
- package/dist/utils/async_caller.cjs +195 -0
- package/dist/utils/eventsource-parser/index.cjs +7 -0
- package/dist/utils/eventsource-parser/index.d.ts +2 -0
- package/dist/utils/eventsource-parser/index.js +3 -0
- package/dist/utils/eventsource-parser/parse.cjs +150 -0
- package/dist/utils/eventsource-parser/parse.d.ts +18 -0
- package/dist/utils/eventsource-parser/parse.js +146 -0
- package/dist/utils/eventsource-parser/stream.cjs +34 -0
- package/dist/utils/eventsource-parser/stream.d.ts +17 -0
- package/dist/utils/eventsource-parser/stream.js +30 -0
- package/dist/utils/eventsource-parser/types.cjs +2 -0
- package/dist/utils/eventsource-parser/types.d.ts +81 -0
- package/dist/utils/eventsource-parser/types.js +1 -0
- package/dist/utils/stream.cjs +115 -0
- package/dist/utils/{stream.d.mts → stream.d.ts} +1 -0
- package/dist/utils/{stream.mjs → stream.js} +5 -0
- package/index.d.ts +1 -1
- package/index.js +1 -1
- package/package.json +1 -2
- package/dist/index.mjs +0 -1
- /package/dist/{types.d.mts → types.d.ts} +0 -0
- /package/dist/{types.mjs → types.js} +0 -0
- /package/dist/utils/{async_caller.d.mts → async_caller.d.ts} +0 -0
- /package/dist/utils/{async_caller.mjs → async_caller.js} +0 -0
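
Taken together, the file list shows the packaging change in 0.0.10: the `.mjs`/`.d.mts` build output is renamed to `.js`/`.d.ts`, parallel `.cjs` builds are added, and an eventsource-parser plus an AsyncCaller utility are inlined under `dist/utils`. Assuming the package.json `exports` map (its +1 -2 change is not shown here) wires these builds up, the SDK should load from either module system. A hedged sketch; the `apiUrl` option and its value are illustrative and not taken from this diff:

// ESM / TypeScript consumers resolve to the .js/.d.ts build
import { Client } from "@langchain/langgraph-sdk";

// Illustrative only: the constructor options are not part of this diff.
const client = new Client({ apiUrl: "http://localhost:8123" });

// CommonJS consumers would load the new .cjs build instead, e.g.
//   const { Client } = require("@langchain/langgraph-sdk");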
package/dist/{index.d.mts → index.d.ts}
RENAMED
@@ -1,2 +1,2 @@
-export { Client } from "./client.
+export { Client } from "./client.js";
 export type { Assistant, AssistantGraph, Config, DefaultValues, GraphSchema, Metadata, Run, Thread, ThreadState, Cron, } from "./schema.js";
package/dist/index.js
ADDED
@@ -0,0 +1 @@
+export { Client } from "./client.js";
package/dist/utils/async_caller.cjs
ADDED
@@ -0,0 +1,195 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.AsyncCaller = void 0;
+const p_retry_1 = __importDefault(require("p-retry"));
+const p_queue_1 = __importDefault(require("p-queue"));
+const STATUS_NO_RETRY = [
+    400, // Bad Request
+    401, // Unauthorized
+    403, // Forbidden
+    404, // Not Found
+    405, // Method Not Allowed
+    406, // Not Acceptable
+    407, // Proxy Authentication Required
+    408, // Request Timeout
+    422, // Unprocessable Entity
+];
+const STATUS_IGNORE = [
+    409, // Conflict
+];
+/**
+ * Do not rely on globalThis.Response, rather just
+ * do duck typing
+ */
+function isResponse(x) {
+    if (x == null || typeof x !== "object")
+        return false;
+    return "status" in x && "statusText" in x && "text" in x;
+}
+/**
+ * Utility error to properly handle failed requests
+ */
+class HTTPError extends Error {
+    constructor(status, message, response) {
+        super(`HTTP ${status}: ${message}`);
+        Object.defineProperty(this, "status", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "text", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "response", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        this.status = status;
+        this.text = message;
+        this.response = response;
+    }
+    static async fromResponse(response, options) {
+        try {
+            return new HTTPError(response.status, await response.text(), options?.includeResponse ? response : undefined);
+        }
+        catch {
+            return new HTTPError(response.status, response.statusText, options?.includeResponse ? response : undefined);
+        }
+    }
+}
+/**
+ * A class that can be used to make async calls with concurrency and retry logic.
+ *
+ * This is useful for making calls to any kind of "expensive" external resource,
+ * be it because it's rate-limited, subject to network issues, etc.
+ *
+ * Concurrent calls are limited by the `maxConcurrency` parameter, which defaults
+ * to `Infinity`. This means that by default, all calls will be made in parallel.
+ *
+ * Retries are limited by the `maxRetries` parameter, which defaults to 5. This
+ * means that by default, each call will be retried up to 5 times, with an
+ * exponential backoff between each attempt.
+ */
+class AsyncCaller {
+    constructor(params) {
+        Object.defineProperty(this, "maxConcurrency", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "maxRetries", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "queue", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "onFailedResponseHook", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "customFetch", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        this.maxConcurrency = params.maxConcurrency ?? Infinity;
+        this.maxRetries = params.maxRetries ?? 4;
+        if ("default" in p_queue_1.default) {
+            // eslint-disable-next-line @typescript-eslint/no-explicit-any
+            this.queue = new p_queue_1.default.default({
+                concurrency: this.maxConcurrency,
+            });
+        }
+        else {
+            // eslint-disable-next-line @typescript-eslint/no-explicit-any
+            this.queue = new p_queue_1.default({ concurrency: this.maxConcurrency });
+        }
+        this.onFailedResponseHook = params?.onFailedResponseHook;
+        this.customFetch = params.fetch;
+    }
+    // eslint-disable-next-line @typescript-eslint/no-explicit-any
+    call(callable, ...args) {
+        const onFailedResponseHook = this.onFailedResponseHook;
+        return this.queue.add(() => (0, p_retry_1.default)(() => callable(...args).catch(async (error) => {
+            // eslint-disable-next-line no-instanceof/no-instanceof
+            if (error instanceof Error) {
+                throw error;
+            }
+            else if (isResponse(error)) {
+                throw await HTTPError.fromResponse(error, {
+                    includeResponse: !!onFailedResponseHook,
+                });
+            }
+            else {
+                throw new Error(error);
+            }
+        }), {
+            async onFailedAttempt(error) {
+                if (error.message.startsWith("Cancel") ||
+                    error.message.startsWith("TimeoutError") ||
+                    error.message.startsWith("AbortError")) {
+                    throw error;
+                }
+                // eslint-disable-next-line @typescript-eslint/no-explicit-any
+                if (error?.code === "ECONNABORTED") {
+                    throw error;
+                }
+                if (error instanceof HTTPError) {
+                    if (STATUS_NO_RETRY.includes(error.status)) {
+                        throw error;
+                    }
+                    else if (STATUS_IGNORE.includes(error.status)) {
+                        return;
+                    }
+                    if (onFailedResponseHook && error.response) {
+                        await onFailedResponseHook(error.response);
+                    }
+                }
+            },
+            // If needed we can change some of the defaults here,
+            // but they're quite sensible.
+            retries: this.maxRetries,
+            randomize: true,
+        }), { throwOnTimeout: true });
+    }
+    // eslint-disable-next-line @typescript-eslint/no-explicit-any
+    callWithOptions(options, callable, ...args) {
+        // Note this doesn't cancel the underlying request,
+        // when available prefer to use the signal option of the underlying call
+        if (options.signal) {
+            return Promise.race([
+                this.call(callable, ...args),
+                new Promise((_, reject) => {
+                    options.signal?.addEventListener("abort", () => {
+                        reject(new Error("AbortError"));
+                    });
+                }),
+            ]);
+        }
+        return this.call(callable, ...args);
+    }
+    fetch(...args) {
+        const fetchFn = this.customFetch ?? fetch;
+        return this.call(() => fetchFn(...args).then((res) => (res.ok ? res : Promise.reject(res))));
+    }
+}
+exports.AsyncCaller = AsyncCaller;
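
AsyncCaller is the package's internal helper for making async HTTP calls with bounded concurrency and retries, as its docstring above describes. A minimal usage sketch; the deep import path, the endpoint URL, and the idea of using the class directly are illustrative assumptions rather than documented public API:

import { AsyncCaller } from "@langchain/langgraph-sdk/dist/utils/async_caller.js"; // assumed path

// Queue calls with bounded concurrency; retry transient failures with backoff.
const caller = new AsyncCaller({ maxConcurrency: 4, maxRetries: 2 });

// fetch() rejects non-2xx responses, wraps them in HTTPError, and retries
// unless the status code appears in STATUS_NO_RETRY above.
const res = await caller.fetch("http://localhost:8123/health"); // placeholder URL
console.log(res.status);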
package/dist/utils/eventsource-parser/index.cjs
ADDED
@@ -0,0 +1,7 @@
+"use strict";
+// From https://github.com/rexxars/eventsource-parser
+// Inlined due to CJS import issues
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.createParser = void 0;
+var parse_js_1 = require("./parse.cjs");
+Object.defineProperty(exports, "createParser", { enumerable: true, get: function () { return parse_js_1.createParser; } });
package/dist/utils/eventsource-parser/parse.cjs
ADDED
@@ -0,0 +1,150 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.createParser = void 0;
+/**
+ * Creates a new EventSource parser.
+ *
+ * @param onParse - Callback to invoke when a new event is parsed, or a new reconnection interval
+ * has been sent from the server
+ *
+ * @returns A new EventSource parser, with `parse` and `reset` methods.
+ * @public
+ */
+function createParser(onParse) {
+    // Processing state
+    let isFirstChunk;
+    let buffer;
+    let startingPosition;
+    let startingFieldLength;
+    // Event state
+    let eventId;
+    let eventName;
+    let data;
+    reset();
+    return { feed, reset };
+    function reset() {
+        isFirstChunk = true;
+        buffer = "";
+        startingPosition = 0;
+        startingFieldLength = -1;
+        eventId = undefined;
+        eventName = undefined;
+        data = "";
+    }
+    function feed(chunk) {
+        buffer = buffer ? buffer + chunk : chunk;
+        // Strip any UTF8 byte order mark (BOM) at the start of the stream.
+        // Note that we do not strip any non-UTF8 BOM, as eventsource streams are
+        // always decoded as UTF8 as per the specification.
+        if (isFirstChunk && hasBom(buffer)) {
+            buffer = buffer.slice(BOM.length);
+        }
+        isFirstChunk = false;
+        // Set up chunk-specific processing state
+        const length = buffer.length;
+        let position = 0;
+        let discardTrailingNewline = false;
+        // Read the current buffer byte by byte
+        while (position < length) {
+            // EventSource allows for carriage return + line feed, which means we
+            // need to ignore a linefeed character if the previous character was a
+            // carriage return
+            // @todo refactor to reduce nesting, consider checking previous byte?
+            // @todo but consider multiple chunks etc
+            if (discardTrailingNewline) {
+                if (buffer[position] === "\n") {
+                    ++position;
+                }
+                discardTrailingNewline = false;
+            }
+            let lineLength = -1;
+            let fieldLength = startingFieldLength;
+            let character;
+            for (let index = startingPosition; lineLength < 0 && index < length; ++index) {
+                character = buffer[index];
+                if (character === ":" && fieldLength < 0) {
+                    fieldLength = index - position;
+                }
+                else if (character === "\r") {
+                    discardTrailingNewline = true;
+                    lineLength = index - position;
+                }
+                else if (character === "\n") {
+                    lineLength = index - position;
+                }
+            }
+            if (lineLength < 0) {
+                startingPosition = length - position;
+                startingFieldLength = fieldLength;
+                break;
+            }
+            else {
+                startingPosition = 0;
+                startingFieldLength = -1;
+            }
+            parseEventStreamLine(buffer, position, fieldLength, lineLength);
+            position += lineLength + 1;
+        }
+        if (position === length) {
+            // If we consumed the entire buffer to read the event, reset the buffer
+            buffer = "";
+        }
+        else if (position > 0) {
+            // If there are bytes left to process, set the buffer to the unprocessed
+            // portion of the buffer only
+            buffer = buffer.slice(position);
+        }
+    }
+    function parseEventStreamLine(lineBuffer, index, fieldLength, lineLength) {
+        if (lineLength === 0) {
+            // We reached the last line of this event
+            if (data.length > 0) {
+                onParse({
+                    type: "event",
+                    id: eventId,
+                    event: eventName || undefined,
+                    data: data.slice(0, -1), // remove trailing newline
+                });
+                data = "";
+                eventId = undefined;
+            }
+            eventName = undefined;
+            return;
+        }
+        const noValue = fieldLength < 0;
+        const field = lineBuffer.slice(index, index + (noValue ? lineLength : fieldLength));
+        let step = 0;
+        if (noValue) {
+            step = lineLength;
+        }
+        else if (lineBuffer[index + fieldLength + 1] === " ") {
+            step = fieldLength + 2;
+        }
+        else {
+            step = fieldLength + 1;
+        }
+        const position = index + step;
+        const valueLength = lineLength - step;
+        const value = lineBuffer.slice(position, position + valueLength).toString();
+        if (field === "data") {
+            data += value ? `${value}\n` : "\n";
+        }
+        else if (field === "event") {
+            eventName = value;
+        }
+        else if (field === "id" && !value.includes("\u0000")) {
+            eventId = value;
+        }
+        else if (field === "retry") {
+            const retry = parseInt(value, 10);
+            if (!Number.isNaN(retry)) {
+                onParse({ type: "reconnect-interval", value: retry });
+            }
+        }
+    }
+}
+exports.createParser = createParser;
+const BOM = [239, 187, 191];
+function hasBom(buffer) {
+    return BOM.every((charCode, index) => buffer.charCodeAt(index) === charCode);
+}
package/dist/utils/eventsource-parser/parse.d.ts
ADDED
@@ -0,0 +1,18 @@
+/**
+ * EventSource/Server-Sent Events parser
+ * @see https://html.spec.whatwg.org/multipage/server-sent-events.html
+ *
+ * Based on code from the {@link https://github.com/EventSource/eventsource | EventSource module},
+ * which is licensed under the MIT license. And copyrighted the EventSource GitHub organisation.
+ */
+import type { EventSourceParseCallback, EventSourceParser } from "./types.js";
+/**
+ * Creates a new EventSource parser.
+ *
+ * @param onParse - Callback to invoke when a new event is parsed, or a new reconnection interval
+ * has been sent from the server
+ *
+ * @returns A new EventSource parser, with `parse` and `reset` methods.
+ * @public
+ */
+export declare function createParser(onParse: EventSourceParseCallback): EventSourceParser;
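
The declaration above is the whole parser contract: createParser takes a callback and returns an object whose feed method accepts raw SSE text in arbitrary chunks. A short illustrative sketch; the deep import path and the sample payload are assumptions:

import { createParser } from "@langchain/langgraph-sdk/dist/utils/eventsource-parser/parse.js"; // assumed path

const parser = createParser((event) => {
  if (event.type === "event") {
    // `data` is the accumulated `data:` payload with the trailing newline removed.
    console.log(event.event ?? "message", event.data);
  } else {
    // "reconnect-interval" carries the server-requested retry delay in milliseconds.
    console.log("retry after", event.value);
  }
});

// Chunk boundaries need not align with lines; partial input is buffered.
parser.feed("event: metadata\ndata: ");
parser.feed('{"run_id": "1"}\n\n'); // the blank line completes the event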
package/dist/utils/eventsource-parser/parse.js
ADDED
@@ -0,0 +1,146 @@
+/**
+ * Creates a new EventSource parser.
+ *
+ * @param onParse - Callback to invoke when a new event is parsed, or a new reconnection interval
+ * has been sent from the server
+ *
+ * @returns A new EventSource parser, with `parse` and `reset` methods.
+ * @public
+ */
+export function createParser(onParse) {
+    // Processing state
+    let isFirstChunk;
+    let buffer;
+    let startingPosition;
+    let startingFieldLength;
+    // Event state
+    let eventId;
+    let eventName;
+    let data;
+    reset();
+    return { feed, reset };
+    function reset() {
+        isFirstChunk = true;
+        buffer = "";
+        startingPosition = 0;
+        startingFieldLength = -1;
+        eventId = undefined;
+        eventName = undefined;
+        data = "";
+    }
+    function feed(chunk) {
+        buffer = buffer ? buffer + chunk : chunk;
+        // Strip any UTF8 byte order mark (BOM) at the start of the stream.
+        // Note that we do not strip any non-UTF8 BOM, as eventsource streams are
+        // always decoded as UTF8 as per the specification.
+        if (isFirstChunk && hasBom(buffer)) {
+            buffer = buffer.slice(BOM.length);
+        }
+        isFirstChunk = false;
+        // Set up chunk-specific processing state
+        const length = buffer.length;
+        let position = 0;
+        let discardTrailingNewline = false;
+        // Read the current buffer byte by byte
+        while (position < length) {
+            // EventSource allows for carriage return + line feed, which means we
+            // need to ignore a linefeed character if the previous character was a
+            // carriage return
+            // @todo refactor to reduce nesting, consider checking previous byte?
+            // @todo but consider multiple chunks etc
+            if (discardTrailingNewline) {
+                if (buffer[position] === "\n") {
+                    ++position;
+                }
+                discardTrailingNewline = false;
+            }
+            let lineLength = -1;
+            let fieldLength = startingFieldLength;
+            let character;
+            for (let index = startingPosition; lineLength < 0 && index < length; ++index) {
+                character = buffer[index];
+                if (character === ":" && fieldLength < 0) {
+                    fieldLength = index - position;
+                }
+                else if (character === "\r") {
+                    discardTrailingNewline = true;
+                    lineLength = index - position;
+                }
+                else if (character === "\n") {
+                    lineLength = index - position;
+                }
+            }
+            if (lineLength < 0) {
+                startingPosition = length - position;
+                startingFieldLength = fieldLength;
+                break;
+            }
+            else {
+                startingPosition = 0;
+                startingFieldLength = -1;
+            }
+            parseEventStreamLine(buffer, position, fieldLength, lineLength);
+            position += lineLength + 1;
+        }
+        if (position === length) {
+            // If we consumed the entire buffer to read the event, reset the buffer
+            buffer = "";
+        }
+        else if (position > 0) {
+            // If there are bytes left to process, set the buffer to the unprocessed
+            // portion of the buffer only
+            buffer = buffer.slice(position);
+        }
+    }
+    function parseEventStreamLine(lineBuffer, index, fieldLength, lineLength) {
+        if (lineLength === 0) {
+            // We reached the last line of this event
+            if (data.length > 0) {
+                onParse({
+                    type: "event",
+                    id: eventId,
+                    event: eventName || undefined,
+                    data: data.slice(0, -1), // remove trailing newline
+                });
+                data = "";
+                eventId = undefined;
+            }
+            eventName = undefined;
+            return;
+        }
+        const noValue = fieldLength < 0;
+        const field = lineBuffer.slice(index, index + (noValue ? lineLength : fieldLength));
+        let step = 0;
+        if (noValue) {
+            step = lineLength;
+        }
+        else if (lineBuffer[index + fieldLength + 1] === " ") {
+            step = fieldLength + 2;
+        }
+        else {
+            step = fieldLength + 1;
+        }
+        const position = index + step;
+        const valueLength = lineLength - step;
+        const value = lineBuffer.slice(position, position + valueLength).toString();
+        if (field === "data") {
+            data += value ? `${value}\n` : "\n";
+        }
+        else if (field === "event") {
+            eventName = value;
+        }
+        else if (field === "id" && !value.includes("\u0000")) {
+            eventId = value;
+        }
+        else if (field === "retry") {
+            const retry = parseInt(value, 10);
+            if (!Number.isNaN(retry)) {
+                onParse({ type: "reconnect-interval", value: retry });
+            }
+        }
+    }
+}
+const BOM = [239, 187, 191];
+function hasBom(buffer) {
+    return BOM.every((charCode, index) => buffer.charCodeAt(index) === charCode);
+}
package/dist/utils/eventsource-parser/stream.cjs
ADDED
@@ -0,0 +1,34 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.EventSourceParserStream = void 0;
+const parse_js_1 = require("./parse.cjs");
+/**
+ * A TransformStream that ingests a stream of strings and produces a stream of ParsedEvents.
+ *
+ * @example
+ * ```
+ * const eventStream =
+ *   response.body
+ *     .pipeThrough(new TextDecoderStream())
+ *     .pipeThrough(new EventSourceParserStream())
+ * ```
+ * @public
+ */
+class EventSourceParserStream extends TransformStream {
+    constructor() {
+        let parser;
+        super({
+            start(controller) {
+                parser = (0, parse_js_1.createParser)((event) => {
+                    if (event.type === "event") {
+                        controller.enqueue(event);
+                    }
+                });
+            },
+            transform(chunk) {
+                parser.feed(chunk);
+            },
+        });
+    }
+}
+exports.EventSourceParserStream = EventSourceParserStream;
package/dist/utils/eventsource-parser/stream.d.ts
ADDED
@@ -0,0 +1,17 @@
+import type { ParsedEvent } from "./types.js";
+/**
+ * A TransformStream that ingests a stream of strings and produces a stream of ParsedEvents.
+ *
+ * @example
+ * ```
+ * const eventStream =
+ *   response.body
+ *     .pipeThrough(new TextDecoderStream())
+ *     .pipeThrough(new EventSourceParserStream())
+ * ```
+ * @public
+ */
+export declare class EventSourceParserStream extends TransformStream<string, ParsedEvent> {
+    constructor();
+}
+export type { ParsedEvent } from "./types.js";
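
Expanding the @example above end to end: the stream turns a fetch response body into ParsedEvent objects. A sketch under the assumptions that the class is reachable via the internal dist path and that the URL serves text/event-stream (both illustrative):

import { EventSourceParserStream } from "@langchain/langgraph-sdk/dist/utils/eventsource-parser/stream.js"; // assumed path

const response = await fetch("http://localhost:8123/runs/stream"); // illustrative SSE endpoint

const eventStream = response.body!
  .pipeThrough(new TextDecoderStream())        // bytes -> strings
  .pipeThrough(new EventSourceParserStream()); // strings -> ParsedEvent objects

// Read events until the server closes the stream.
const reader = eventStream.getReader();
for (;;) {
  const { done, value } = await reader.read();
  if (done) break;
  console.log(value.event, value.data);
}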
package/dist/utils/eventsource-parser/stream.js
ADDED
@@ -0,0 +1,30 @@
+import { createParser } from "./parse.js";
+/**
+ * A TransformStream that ingests a stream of strings and produces a stream of ParsedEvents.
+ *
+ * @example
+ * ```
+ * const eventStream =
+ *   response.body
+ *     .pipeThrough(new TextDecoderStream())
+ *     .pipeThrough(new EventSourceParserStream())
+ * ```
+ * @public
+ */
+export class EventSourceParserStream extends TransformStream {
+    constructor() {
+        let parser;
+        super({
+            start(controller) {
+                parser = createParser((event) => {
+                    if (event.type === "event") {
+                        controller.enqueue(event);
+                    }
+                });
+            },
+            transform(chunk) {
+                parser.feed(chunk);
+            },
+        });
+    }
+}