@langchain/langgraph-sdk 0.0.8 → 0.0.10-rc.0
- package/client.d.ts +1 -1
- package/client.js +1 -1
- package/dist/client.cjs +677 -0
- package/dist/{client.d.mts → client.d.ts} +30 -5
- package/dist/{client.mjs → client.js} +81 -12
- package/dist/index.cjs +5 -0
- package/dist/{index.d.mts → index.d.ts} +1 -1
- package/dist/index.js +1 -0
- package/dist/types.cjs +2 -0
- package/dist/{types.d.mts → types.d.ts} +20 -4
- package/dist/utils/async_caller.cjs +195 -0
- package/dist/utils/{async_caller.d.mts → async_caller.d.ts} +1 -1
- package/dist/utils/eventsource-parser/index.cjs +7 -0
- package/dist/utils/eventsource-parser/index.d.ts +2 -0
- package/dist/utils/eventsource-parser/index.js +3 -0
- package/dist/utils/eventsource-parser/parse.cjs +150 -0
- package/dist/utils/eventsource-parser/parse.d.ts +18 -0
- package/dist/utils/eventsource-parser/parse.js +146 -0
- package/dist/utils/eventsource-parser/stream.cjs +34 -0
- package/dist/utils/eventsource-parser/stream.d.ts +17 -0
- package/dist/utils/eventsource-parser/stream.js +30 -0
- package/dist/utils/eventsource-parser/types.cjs +2 -0
- package/dist/utils/eventsource-parser/types.d.ts +81 -0
- package/dist/utils/eventsource-parser/types.js +1 -0
- package/dist/utils/stream.cjs +115 -0
- package/dist/utils/{stream.d.mts → stream.d.ts} +1 -0
- package/dist/utils/{stream.mjs → stream.js} +5 -0
- package/index.d.ts +1 -1
- package/index.js +1 -1
- package/package.json +1 -2
- package/dist/index.mjs +0 -1
- /package/dist/{types.mjs → types.js} +0 -0
- /package/dist/utils/{async_caller.mjs → async_caller.js} +0 -0
package/dist/{client.d.mts → client.d.ts}

@@ -1,6 +1,6 @@
 import { Assistant, AssistantGraph, Config, DefaultValues, GraphSchema, Metadata, Run, Thread, ThreadState, Cron } from "./schema.js";
-import { AsyncCaller, AsyncCallerParams } from "./utils/async_caller.
-import { RunsCreatePayload, RunsStreamPayload, RunsWaitPayload, StreamEvent, CronsCreatePayload, OnConflictBehavior } from "./types.
+import { AsyncCaller, AsyncCallerParams } from "./utils/async_caller.js";
+import { RunsCreatePayload, RunsStreamPayload, RunsWaitPayload, StreamEvent, CronsCreatePayload, OnConflictBehavior } from "./types.js";
 interface ClientConfig {
     apiUrl?: string;
     apiKey?: string;
@@ -85,6 +85,8 @@ export declare class AssistantsClient extends BaseClient {
         graphId: string;
         config?: Config;
         metadata?: Metadata;
+        assistantId?: string;
+        ifExists?: OnConflictBehavior;
     }): Promise<Assistant>;
     /**
      * Update an assistant.
@@ -93,7 +95,7 @@ export declare class AssistantsClient extends BaseClient {
      * @returns The updated assistant.
      */
     update(assistantId: string, payload: {
-        graphId
+        graphId?: string;
         config?: Config;
         metadata?: Metadata;
     }): Promise<Assistant>;
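For reference, a minimal sketch of how the new `assistantId` / `ifExists` options (and the now-optional `graphId` on update) could be used. The `apiUrl`, graph name, and UUID are placeholders, `client.assistants` is assumed to be how the exported `Client` exposes this `AssistantsClient`, and the `assistant_id` field on the returned object is assumed from the server schema rather than shown in this hunk:

```ts
import { Client } from "@langchain/langgraph-sdk";

const client = new Client({ apiUrl: "http://localhost:8123" }); // placeholder URL

// Idempotent create: pin the assistant ID and skip creation if it already exists.
const assistant = await client.assistants.create({
  graphId: "agent", // placeholder graph name
  assistantId: "b7c0a2f2-0000-4000-8000-000000000000", // placeholder UUID
  ifExists: "do_nothing", // OnConflictBehavior: "raise" | "do_nothing"
  metadata: { owner: "examples" },
});

// graphId is now optional on update, so a metadata-only update type-checks.
await client.assistants.update(assistant.assistant_id, {
  metadata: { owner: "examples", revised: true },
});
```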
@@ -222,7 +224,7 @@ export declare class ThreadsClient extends BaseClient {
     }): Promise<ThreadState<ValuesType>[]>;
 }
 export declare class RunsClient extends BaseClient {
-    stream(threadId: null, assistantId: string, payload?: Omit<RunsStreamPayload, "multitaskStrategy">): AsyncGenerator<{
+    stream(threadId: null, assistantId: string, payload?: Omit<RunsStreamPayload, "multitaskStrategy" | "onCompletion">): AsyncGenerator<{
         event: StreamEvent;
         data: any;
     }>;
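A hedged sketch of the stateless streaming overload above, which takes `null` in place of a thread ID and therefore omits `multitaskStrategy` and `onCompletion` from the accepted payload. The graph name and input are placeholders, and `client.runs` is an assumed access path for this `RunsClient`:

```ts
import { Client } from "@langchain/langgraph-sdk";

const client = new Client({ apiUrl: "http://localhost:8123" }); // placeholder URL

// Passing null as threadId selects the stateless overload.
for await (const chunk of client.runs.stream(null, "agent", {
  input: { messages: [{ role: "user", content: "hello" }] }, // placeholder input
  streamMode: "updates",
})) {
  console.log(chunk.event, chunk.data);
}
```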
@@ -239,7 +241,16 @@ export declare class RunsClient extends BaseClient {
      * @returns The created run.
      */
     create(threadId: string, assistantId: string, payload?: RunsCreatePayload): Promise<Run>;
-    wait(threadId: null, assistantId: string, payload?: Omit<RunsWaitPayload, "multitaskStrategy">): Promise<ThreadState["values"]>;
+    /**
+     * Create a batch of stateless background runs.
+     *
+     * @param payloads An array of payloads for creating runs.
+     * @returns An array of created runs.
+     */
+    createBatch(payloads: (RunsCreatePayload & {
+        assistantId: string;
+    })[]): Promise<Run[]>;
+    wait(threadId: null, assistantId: string, payload?: Omit<RunsWaitPayload, "multitaskStrategy" | "onCompletion">): Promise<ThreadState["values"]>;
     wait(threadId: string, assistantId: string, payload?: RunsWaitPayload): Promise<ThreadState["values"]>;
     /**
      * List all runs for a thread.
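A sketch of the new `createBatch` method, which creates stateless background runs in a single call; each element is a `RunsCreatePayload` plus a required `assistantId`. The URL, assistant names, and inputs are placeholders:

```ts
import { Client } from "@langchain/langgraph-sdk";

const client = new Client({ apiUrl: "http://localhost:8123" }); // placeholder URL

// Each payload names the assistant it should run against.
const runs = await client.runs.createBatch([
  { assistantId: "agent", input: { topic: "first task" } },  // placeholders
  { assistantId: "agent", input: { topic: "second task" } },
]);

console.log(`created ${runs.length} stateless background runs`);
```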
@@ -285,6 +296,20 @@ export declare class RunsClient extends BaseClient {
      * @returns
      */
     join(threadId: string, runId: string): Promise<void>;
+    /**
+     * Stream output from a run in real-time, until the run is done.
+     * Output is not buffered, so any output produced before this call will
+     * not be received here.
+     *
+     * @param threadId The ID of the thread.
+     * @param runId The ID of the run.
+     * @param signal An optional abort signal.
+     * @returns An async generator yielding stream parts.
+     */
+    joinStream(threadId: string, runId: string, signal?: AbortSignal): AsyncGenerator<{
+        event: StreamEvent;
+        data: any;
+    }>;
     /**
      * Delete a run.
      *
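A sketch of attaching to an in-flight run with the new `joinStream`, including an optional abort signal. The URL, thread ID, and run ID are placeholders; the implementation in the client.js hunk further down also enqueues a final `end` event when the server signals completion:

```ts
import { Client } from "@langchain/langgraph-sdk";

const client = new Client({ apiUrl: "http://localhost:8123" }); // placeholder URL
const controller = new AbortController();

// Only output produced after this call is received; earlier output is not buffered.
for await (const { event, data } of client.runs.joinStream(
  "00000000-0000-0000-0000-000000000000", // placeholder thread ID
  "11111111-1111-1111-1111-111111111111", // placeholder run ID
  controller.signal,
)) {
  console.log(event, data);
  if (event === "end") break; // the generator also terminates on its own when the run finishes
}
```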
package/dist/{client.mjs → client.js}

@@ -1,6 +1,6 @@
-import { AsyncCaller } from "./utils/async_caller.
-import { createParser } from "eventsource-parser";
-import { IterableReadableStream } from "./utils/stream.
+import { AsyncCaller } from "./utils/async_caller.js";
+import { createParser, } from "./utils/eventsource-parser/index.js";
+import { IterableReadableStream } from "./utils/stream.js";
 class BaseClient {
     constructor(config) {
         Object.defineProperty(this, "asyncCaller", {
@@ -92,6 +92,7 @@ export class CronsClient extends BaseClient {
             interrupt_before: payload?.interruptBefore,
             interrupt_after: payload?.interruptAfter,
             webhook: payload?.webhook,
+            multitask_strategy: payload?.multitaskStrategy,
         };
         return this.fetch(`/threads/${threadId}/runs/crons`, {
             method: "POST",
@@ -114,6 +115,7 @@ export class CronsClient extends BaseClient {
             interrupt_before: payload?.interruptBefore,
             interrupt_after: payload?.interruptAfter,
             webhook: payload?.webhook,
+            multitask_strategy: payload?.multitaskStrategy,
         };
         return this.fetch(`/runs/crons`, {
             method: "POST",
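For context, a hedged sketch of a cron payload that now carries `multitaskStrategy`. The `createForThread` method name and the `schedule` field are assumed from the wider SDK surface rather than shown in these hunks; the URL, thread ID, graph name, and schedule are placeholders:

```ts
import { Client } from "@langchain/langgraph-sdk";

const client = new Client({ apiUrl: "http://localhost:8123" }); // placeholder URL

// multitaskStrategy is now forwarded as multitask_strategy for cron runs too.
await client.crons.createForThread(
  "00000000-0000-0000-0000-000000000000", // placeholder thread ID
  "agent",                                // placeholder graph name
  {
    schedule: "0 * * * *",          // assumed field: a cron expression
    input: { topic: "hourly job" }, // placeholder input
    multitaskStrategy: "enqueue",   // "reject" | "interrupt" | "rollback" | "enqueue"
  },
);
```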
@@ -184,6 +186,8 @@ export class AssistantsClient extends BaseClient {
                 graph_id: payload.graphId,
                 config: payload.config,
                 metadata: payload.metadata,
+                assistant_id: payload.assistantId,
+                if_exists: payload.ifExists,
             },
         });
     }
@@ -391,10 +395,11 @@ export class RunsClient extends BaseClient {
             interrupt_before: payload?.interruptBefore,
             interrupt_after: payload?.interruptAfter,
             checkpoint_id: payload?.checkpointId,
+            webhook: payload?.webhook,
+            multitask_strategy: payload?.multitaskStrategy,
+            on_completion: payload?.onCompletion,
+            on_disconnect: payload?.onDisconnect,
         };
-        if (payload?.multitaskStrategy != null) {
-            json["multitask_strategy"] = payload?.multitaskStrategy;
-        }
         const endpoint = threadId == null ? `/runs/stream` : `/threads/${threadId}/runs/stream`;
         const response = await this.asyncCaller.fetch(...this.prepareFetchOptions(endpoint, {
             method: "POST",
@@ -452,16 +457,31 @@ export class RunsClient extends BaseClient {
             interrupt_after: payload?.interruptAfter,
             webhook: payload?.webhook,
             checkpoint_id: payload?.checkpointId,
+            multitask_strategy: payload?.multitaskStrategy,
         };
-        if (payload?.multitaskStrategy != null) {
-            json["multitask_strategy"] = payload?.multitaskStrategy;
-        }
         return this.fetch(`/threads/${threadId}/runs`, {
             method: "POST",
             json,
             signal: payload?.signal,
         });
     }
+    /**
+     * Create a batch of stateless background runs.
+     *
+     * @param payloads An array of payloads for creating runs.
+     * @returns An array of created runs.
+     */
+    async createBatch(payloads) {
+        const filteredPayloads = payloads
+            .map((payload) => ({ ...payload, assistant_id: payload.assistantId }))
+            .map((payload) => {
+            return Object.fromEntries(Object.entries(payload).filter(([_, v]) => v !== undefined));
+        });
+        return this.fetch("/runs/batch", {
+            method: "POST",
+            json: filteredPayloads,
+        });
+    }
     /**
      * Create a run and wait for it to complete.
      *
@@ -479,10 +499,11 @@ export class RunsClient extends BaseClient {
             interrupt_before: payload?.interruptBefore,
             interrupt_after: payload?.interruptAfter,
             checkpoint_id: payload?.checkpointId,
+            webhook: payload?.webhook,
+            multitask_strategy: payload?.multitaskStrategy,
+            on_completion: payload?.onCompletion,
+            on_disconnect: payload?.onDisconnect,
         };
-        if (payload?.multitaskStrategy != null) {
-            json["multitask_strategy"] = payload?.multitaskStrategy;
-        }
         const endpoint = threadId == null ? `/runs/wait` : `/threads/${threadId}/runs/wait`;
         return this.fetch(endpoint, {
             method: "POST",
@@ -541,6 +562,54 @@ export class RunsClient extends BaseClient {
     async join(threadId, runId) {
         return this.fetch(`/threads/${threadId}/runs/${runId}/join`);
     }
+    /**
+     * Stream output from a run in real-time, until the run is done.
+     * Output is not buffered, so any output produced before this call will
+     * not be received here.
+     *
+     * @param threadId The ID of the thread.
+     * @param runId The ID of the run.
+     * @param signal An optional abort signal.
+     * @returns An async generator yielding stream parts.
+     */
+    async *joinStream(threadId, runId, signal) {
+        const response = await this.asyncCaller.fetch(...this.prepareFetchOptions(`/threads/${threadId}/runs/${runId}/stream`, {
+            method: "GET",
+            signal,
+        }));
+        let parser;
+        let onEndEvent;
+        const textDecoder = new TextDecoder();
+        const stream = (response.body || new ReadableStream({ start: (ctrl) => ctrl.close() })).pipeThrough(new TransformStream({
+            async start(ctrl) {
+                parser = createParser((event) => {
+                    if ((signal && signal.aborted) ||
+                        (event.type === "event" && event.data === "[DONE]")) {
+                        ctrl.terminate();
+                        return;
+                    }
+                    if ("data" in event) {
+                        ctrl.enqueue({
+                            event: event.event ?? "message",
+                            data: JSON.parse(event.data),
+                        });
+                    }
+                });
+                onEndEvent = () => {
+                    ctrl.enqueue({ event: "end", data: undefined });
+                };
+            },
+            async transform(chunk) {
+                const payload = textDecoder.decode(chunk);
+                parser.feed(payload);
+                // eventsource-parser will ignore events
+                // that are not terminated by a newline
+                if (payload.trim() === "event: end")
+                    onEndEvent();
+            },
+        }));
+        yield* IterableReadableStream.fromReadableStream(stream);
+    }
     /**
      * Delete a run.
      *
package/dist/index.cjs (ADDED)

package/dist/{index.d.mts → index.d.ts}

@@ -1,2 +1,2 @@
-export { Client } from "./client.
+export { Client } from "./client.js";
 export type { Assistant, AssistantGraph, Config, DefaultValues, GraphSchema, Metadata, Run, Thread, ThreadState, Cron, } from "./schema.js";
package/dist/index.js (ADDED)

@@ -0,0 +1 @@
+export { Client } from "./client.js";
package/dist/types.cjs (ADDED)

package/dist/{types.d.mts → types.d.ts}

@@ -2,6 +2,8 @@ import { Config, Metadata } from "./schema.js";
 export type StreamMode = "values" | "messages" | "updates" | "events" | "debug";
 export type MultitaskStrategy = "reject" | "interrupt" | "rollback" | "enqueue";
 export type OnConflictBehavior = "raise" | "do_nothing";
+export type OnCompletionBehavior = "complete" | "continue";
+export type DisconnectMode = "cancel" | "continue";
 export type StreamEvent = "events" | "metadata" | "debug" | "updates" | "values" | "messages/partial" | "messages/metadata" | "messages/complete" | (string & {});
 interface RunsInvokePayload {
     /**
@@ -43,6 +45,24 @@ interface RunsInvokePayload {
      * Abort controller signal to cancel the run.
      */
     signal?: AbortController["signal"];
+    /**
+     * Behavior to handle run completion. Only relevant if
+     * there is a pending/inflight run on the same thread. One of:
+     * - "complete": Complete the run.
+     * - "continue": Continue the run.
+     */
+    onCompletion?: OnCompletionBehavior;
+    /**
+     * Webhook to call when the run is complete.
+     */
+    webhook?: string;
+    /**
+     * Behavior to handle disconnection. Only relevant if
+     * there is a pending/inflight run on the same thread. One of:
+     * - "cancel": Cancel the run.
+     * - "continue": Continue the run.
+     */
+    onDisconnect?: DisconnectMode;
 }
 export interface RunsStreamPayload extends RunsInvokePayload {
     /**
@@ -62,10 +82,6 @@ export interface RunsStreamPayload extends RunsInvokePayload {
     feedbackKeys?: string[];
 }
 export interface RunsCreatePayload extends RunsInvokePayload {
-    /**
-     * Webhook to call when the run is complete.
-     */
-    webhook?: string;
 }
 export interface CronsCreatePayload extends RunsCreatePayload {
     /**
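A sketch of the new payload fields in use with `runs.wait`, which per the declaration above creates a run and resolves with the thread's final values. The URL, IDs, graph name, input, and webhook address are placeholders, and `client.runs` is an assumed access path:

```ts
import { Client } from "@langchain/langgraph-sdk";

const client = new Client({ apiUrl: "http://localhost:8123" }); // placeholder URL

// wait() creates a run and resolves once the run completes.
const finalValues = await client.runs.wait(
  "00000000-0000-0000-0000-000000000000", // placeholder thread ID
  "agent",                                // placeholder graph name
  {
    input: { topic: "long-running work" },             // placeholder input
    multitaskStrategy: "enqueue",                      // handling of a pending run on the same thread
    onCompletion: "continue",                          // OnCompletionBehavior: "complete" | "continue"
    onDisconnect: "cancel",                            // DisconnectMode: "cancel" | "continue"
    webhook: "https://example.com/hooks/run-finished", // placeholder webhook URL
  },
);

console.log(finalValues);
```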
package/dist/utils/async_caller.cjs (ADDED)

@@ -0,0 +1,195 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.AsyncCaller = void 0;
+const p_retry_1 = __importDefault(require("p-retry"));
+const p_queue_1 = __importDefault(require("p-queue"));
+const STATUS_NO_RETRY = [
+    400, // Bad Request
+    401, // Unauthorized
+    403, // Forbidden
+    404, // Not Found
+    405, // Method Not Allowed
+    406, // Not Acceptable
+    407, // Proxy Authentication Required
+    408, // Request Timeout
+    422, // Unprocessable Entity
+];
+const STATUS_IGNORE = [
+    409, // Conflict
+];
+/**
+ * Do not rely on globalThis.Response, rather just
+ * do duck typing
+ */
+function isResponse(x) {
+    if (x == null || typeof x !== "object")
+        return false;
+    return "status" in x && "statusText" in x && "text" in x;
+}
+/**
+ * Utility error to properly handle failed requests
+ */
+class HTTPError extends Error {
+    constructor(status, message, response) {
+        super(`HTTP ${status}: ${message}`);
+        Object.defineProperty(this, "status", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "text", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "response", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        this.status = status;
+        this.text = message;
+        this.response = response;
+    }
+    static async fromResponse(response, options) {
+        try {
+            return new HTTPError(response.status, await response.text(), options?.includeResponse ? response : undefined);
+        }
+        catch {
+            return new HTTPError(response.status, response.statusText, options?.includeResponse ? response : undefined);
+        }
+    }
+}
+/**
+ * A class that can be used to make async calls with concurrency and retry logic.
+ *
+ * This is useful for making calls to any kind of "expensive" external resource,
+ * be it because it's rate-limited, subject to network issues, etc.
+ *
+ * Concurrent calls are limited by the `maxConcurrency` parameter, which defaults
+ * to `Infinity`. This means that by default, all calls will be made in parallel.
+ *
+ * Retries are limited by the `maxRetries` parameter, which defaults to 5. This
+ * means that by default, each call will be retried up to 5 times, with an
+ * exponential backoff between each attempt.
+ */
+class AsyncCaller {
+    constructor(params) {
+        Object.defineProperty(this, "maxConcurrency", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "maxRetries", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "queue", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "onFailedResponseHook", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "customFetch", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        this.maxConcurrency = params.maxConcurrency ?? Infinity;
+        this.maxRetries = params.maxRetries ?? 4;
+        if ("default" in p_queue_1.default) {
+            // eslint-disable-next-line @typescript-eslint/no-explicit-any
+            this.queue = new p_queue_1.default.default({
+                concurrency: this.maxConcurrency,
+            });
+        }
+        else {
+            // eslint-disable-next-line @typescript-eslint/no-explicit-any
+            this.queue = new p_queue_1.default({ concurrency: this.maxConcurrency });
+        }
+        this.onFailedResponseHook = params?.onFailedResponseHook;
+        this.customFetch = params.fetch;
+    }
+    // eslint-disable-next-line @typescript-eslint/no-explicit-any
+    call(callable, ...args) {
+        const onFailedResponseHook = this.onFailedResponseHook;
+        return this.queue.add(() => (0, p_retry_1.default)(() => callable(...args).catch(async (error) => {
+            // eslint-disable-next-line no-instanceof/no-instanceof
+            if (error instanceof Error) {
+                throw error;
+            }
+            else if (isResponse(error)) {
+                throw await HTTPError.fromResponse(error, {
+                    includeResponse: !!onFailedResponseHook,
+                });
+            }
+            else {
+                throw new Error(error);
+            }
+        }), {
+            async onFailedAttempt(error) {
+                if (error.message.startsWith("Cancel") ||
+                    error.message.startsWith("TimeoutError") ||
+                    error.message.startsWith("AbortError")) {
+                    throw error;
+                }
+                // eslint-disable-next-line @typescript-eslint/no-explicit-any
+                if (error?.code === "ECONNABORTED") {
+                    throw error;
+                }
+                if (error instanceof HTTPError) {
+                    if (STATUS_NO_RETRY.includes(error.status)) {
+                        throw error;
+                    }
+                    else if (STATUS_IGNORE.includes(error.status)) {
+                        return;
+                    }
+                    if (onFailedResponseHook && error.response) {
+                        await onFailedResponseHook(error.response);
+                    }
+                }
+            },
+            // If needed we can change some of the defaults here,
+            // but they're quite sensible.
+            retries: this.maxRetries,
+            randomize: true,
+        }), { throwOnTimeout: true });
+    }
+    // eslint-disable-next-line @typescript-eslint/no-explicit-any
+    callWithOptions(options, callable, ...args) {
+        // Note this doesn't cancel the underlying request,
+        // when available prefer to use the signal option of the underlying call
+        if (options.signal) {
+            return Promise.race([
+                this.call(callable, ...args),
+                new Promise((_, reject) => {
+                    options.signal?.addEventListener("abort", () => {
+                        reject(new Error("AbortError"));
+                    });
+                }),
+            ]);
+        }
+        return this.call(callable, ...args);
+    }
+    fetch(...args) {
+        const fetchFn = this.customFetch ?? fetch;
+        return this.call(() => fetchFn(...args).then((res) => (res.ok ? res : Promise.reject(res))));
+    }
+}
+exports.AsyncCaller = AsyncCaller;
package/dist/utils/{async_caller.d.mts → async_caller.d.ts}

@@ -16,7 +16,7 @@ export interface AsyncCallerParams {
      *
      * By default we expect the `fetch` is available in the global scope.
      */
-    fetch?: typeof fetch;
+    fetch?: typeof fetch | ((...args: any[]) => any);
 }
 export interface AsyncCallerCallOptions {
     signal?: AbortSignal;
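The widened `fetch` type above allows a custom fetch-like function to be injected into the caller. A minimal sketch, written as if inside the package's own dist output so it can reuse the relative import the client itself uses; the wrapper, URL, and option values are illustrative only:

```ts
// Relative import mirrors client.js; AsyncCaller is an internal utility of the SDK build.
import { AsyncCaller } from "./utils/async_caller.js";

const caller = new AsyncCaller({
  maxConcurrency: 4, // limit parallel requests (defaults to Infinity)
  maxRetries: 2,     // retried with randomized exponential backoff
  // A plain fetch-like wrapper now satisfies the widened `fetch` type.
  fetch: (...args: Parameters<typeof fetch>) => {
    console.log("outgoing request:", args[0]);
    return fetch(...args);
  },
});

// Non-ok responses are rejected, wrapped as HTTPError, and retried unless the
// status is in STATUS_NO_RETRY (400, 401, 403, ..., 422) or STATUS_IGNORE (409).
const response = await caller.fetch("http://localhost:8123"); // placeholder URL
console.log(response.status);
```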
package/dist/utils/eventsource-parser/index.cjs (ADDED)

@@ -0,0 +1,7 @@
+"use strict";
+// From https://github.com/rexxars/eventsource-parser
+// Inlined due to CJS import issues
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.createParser = void 0;
+var parse_js_1 = require("./parse.cjs");
+Object.defineProperty(exports, "createParser", { enumerable: true, get: function () { return parse_js_1.createParser; } });
package/dist/utils/eventsource-parser/parse.cjs (ADDED)

@@ -0,0 +1,150 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.createParser = void 0;
+/**
+ * Creates a new EventSource parser.
+ *
+ * @param onParse - Callback to invoke when a new event is parsed, or a new reconnection interval
+ * has been sent from the server
+ *
+ * @returns A new EventSource parser, with `parse` and `reset` methods.
+ * @public
+ */
+function createParser(onParse) {
+    // Processing state
+    let isFirstChunk;
+    let buffer;
+    let startingPosition;
+    let startingFieldLength;
+    // Event state
+    let eventId;
+    let eventName;
+    let data;
+    reset();
+    return { feed, reset };
+    function reset() {
+        isFirstChunk = true;
+        buffer = "";
+        startingPosition = 0;
+        startingFieldLength = -1;
+        eventId = undefined;
+        eventName = undefined;
+        data = "";
+    }
+    function feed(chunk) {
+        buffer = buffer ? buffer + chunk : chunk;
+        // Strip any UTF8 byte order mark (BOM) at the start of the stream.
+        // Note that we do not strip any non - UTF8 BOM, as eventsource streams are
+        // always decoded as UTF8 as per the specification.
+        if (isFirstChunk && hasBom(buffer)) {
+            buffer = buffer.slice(BOM.length);
+        }
+        isFirstChunk = false;
+        // Set up chunk-specific processing state
+        const length = buffer.length;
+        let position = 0;
+        let discardTrailingNewline = false;
+        // Read the current buffer byte by byte
+        while (position < length) {
+            // EventSource allows for carriage return + line feed, which means we
+            // need to ignore a linefeed character if the previous character was a
+            // carriage return
+            // @todo refactor to reduce nesting, consider checking previous byte?
+            // @todo but consider multiple chunks etc
+            if (discardTrailingNewline) {
+                if (buffer[position] === "\n") {
+                    ++position;
+                }
+                discardTrailingNewline = false;
+            }
+            let lineLength = -1;
+            let fieldLength = startingFieldLength;
+            let character;
+            for (let index = startingPosition; lineLength < 0 && index < length; ++index) {
+                character = buffer[index];
+                if (character === ":" && fieldLength < 0) {
+                    fieldLength = index - position;
+                }
+                else if (character === "\r") {
+                    discardTrailingNewline = true;
+                    lineLength = index - position;
+                }
+                else if (character === "\n") {
+                    lineLength = index - position;
+                }
+            }
+            if (lineLength < 0) {
+                startingPosition = length - position;
+                startingFieldLength = fieldLength;
+                break;
+            }
+            else {
+                startingPosition = 0;
+                startingFieldLength = -1;
+            }
+            parseEventStreamLine(buffer, position, fieldLength, lineLength);
+            position += lineLength + 1;
+        }
+        if (position === length) {
+            // If we consumed the entire buffer to read the event, reset the buffer
+            buffer = "";
+        }
+        else if (position > 0) {
+            // If there are bytes left to process, set the buffer to the unprocessed
+            // portion of the buffer only
+            buffer = buffer.slice(position);
+        }
+    }
+    function parseEventStreamLine(lineBuffer, index, fieldLength, lineLength) {
+        if (lineLength === 0) {
+            // We reached the last line of this event
+            if (data.length > 0) {
+                onParse({
+                    type: "event",
+                    id: eventId,
+                    event: eventName || undefined,
+                    data: data.slice(0, -1), // remove trailing newline
+                });
+                data = "";
+                eventId = undefined;
+            }
+            eventName = undefined;
+            return;
+        }
+        const noValue = fieldLength < 0;
+        const field = lineBuffer.slice(index, index + (noValue ? lineLength : fieldLength));
+        let step = 0;
+        if (noValue) {
+            step = lineLength;
+        }
+        else if (lineBuffer[index + fieldLength + 1] === " ") {
+            step = fieldLength + 2;
+        }
+        else {
+            step = fieldLength + 1;
+        }
+        const position = index + step;
+        const valueLength = lineLength - step;
+        const value = lineBuffer.slice(position, position + valueLength).toString();
+        if (field === "data") {
+            data += value ? `${value}\n` : "\n";
+        }
+        else if (field === "event") {
+            eventName = value;
+        }
+        else if (field === "id" && !value.includes("\u0000")) {
+            eventId = value;
+        }
+        else if (field === "retry") {
+            const retry = parseInt(value, 10);
+            if (!Number.isNaN(retry)) {
+                onParse({ type: "reconnect-interval", value: retry });
+            }
+        }
+    }
+}
+exports.createParser = createParser;
+const BOM = [239, 187, 191];
+function hasBom(buffer) {
+    return BOM.every((charCode, index) => buffer.charCodeAt(index) === charCode);
+}
package/dist/utils/eventsource-parser/parse.d.ts (ADDED)

@@ -0,0 +1,18 @@
+/**
+ * EventSource/Server-Sent Events parser
+ * @see https://html.spec.whatwg.org/multipage/server-sent-events.html
+ *
+ * Based on code from the {@link https://github.com/EventSource/eventsource | EventSource module},
+ * which is licensed under the MIT license. And copyrighted the EventSource GitHub organisation.
+ */
+import type { EventSourceParseCallback, EventSourceParser } from "./types.js";
+/**
+ * Creates a new EventSource parser.
+ *
+ * @param onParse - Callback to invoke when a new event is parsed, or a new reconnection interval
+ * has been sent from the server
+ *
+ * @returns A new EventSource parser, with `parse` and `reset` methods.
+ * @public
+ */
+export declare function createParser(onParse: EventSourceParseCallback): EventSourceParser;
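Finally, a small sketch of the vendored parser used in isolation, feeding it raw SSE text the way the client's `joinStream` transform does; it is written as if alongside the client module so it can reuse the same relative import, and the fed chunks are illustrative:

```ts
// Relative import mirrors client.js; the parser is vendored inside the SDK build.
import { createParser } from "./utils/eventsource-parser/index.js";

const parser = createParser((event) => {
  if (event.type === "event") {
    // `event.event` is the SSE event name ("message" when omitted on the wire);
    // `event.data` is the payload accumulated across `data:` lines.
    console.log(event.event ?? "message", JSON.parse(event.data));
  } else {
    console.log("server asked for reconnect interval (ms):", event.value);
  }
});

// Chunks may split events arbitrarily; the parser buffers until the blank
// line that terminates each event.
parser.feed('event: values\ndata: {"count":');
parser.feed(' 1}\n\n');         // emits { type: "event", event: "values", data: '{"count": 1}' }
parser.feed("retry: 3000\n\n"); // emits { type: "reconnect-interval", value: 3000 }

parser.reset(); // clear buffered state before reusing the parser on a new stream
```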