@livekit/agents 1.0.32 → 1.0.34
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/inference/llm.cjs +0 -2
- package/dist/inference/llm.cjs.map +1 -1
- package/dist/inference/llm.d.ts.map +1 -1
- package/dist/inference/llm.js +0 -2
- package/dist/inference/llm.js.map +1 -1
- package/dist/llm/fallback_adapter.cjs +278 -0
- package/dist/llm/fallback_adapter.cjs.map +1 -0
- package/dist/llm/fallback_adapter.d.cts +73 -0
- package/dist/llm/fallback_adapter.d.ts +73 -0
- package/dist/llm/fallback_adapter.d.ts.map +1 -0
- package/dist/llm/fallback_adapter.js +254 -0
- package/dist/llm/fallback_adapter.js.map +1 -0
- package/dist/llm/fallback_adapter.test.cjs +176 -0
- package/dist/llm/fallback_adapter.test.cjs.map +1 -0
- package/dist/llm/fallback_adapter.test.js +175 -0
- package/dist/llm/fallback_adapter.test.js.map +1 -0
- package/dist/llm/index.cjs +3 -0
- package/dist/llm/index.cjs.map +1 -1
- package/dist/llm/index.d.cts +1 -0
- package/dist/llm/index.d.ts +1 -0
- package/dist/llm/index.d.ts.map +1 -1
- package/dist/llm/index.js +4 -0
- package/dist/llm/index.js.map +1 -1
- package/dist/llm/llm.cjs +1 -1
- package/dist/llm/llm.cjs.map +1 -1
- package/dist/llm/llm.js +1 -1
- package/dist/llm/llm.js.map +1 -1
- package/dist/log.cjs +3 -3
- package/dist/log.cjs.map +1 -1
- package/dist/log.d.cts +0 -5
- package/dist/log.d.ts +0 -5
- package/dist/log.d.ts.map +1 -1
- package/dist/log.js +3 -3
- package/dist/log.js.map +1 -1
- package/dist/stt/stt.cjs +1 -1
- package/dist/stt/stt.cjs.map +1 -1
- package/dist/stt/stt.js +1 -1
- package/dist/stt/stt.js.map +1 -1
- package/dist/tts/tts.cjs +2 -2
- package/dist/tts/tts.cjs.map +1 -1
- package/dist/tts/tts.js +2 -2
- package/dist/tts/tts.js.map +1 -1
- package/package.json +1 -1
- package/src/inference/llm.ts +0 -2
- package/src/llm/fallback_adapter.test.ts +238 -0
- package/src/llm/fallback_adapter.ts +391 -0
- package/src/llm/index.ts +6 -0
- package/src/llm/llm.ts +1 -1
- package/src/log.ts +3 -9
- package/src/stt/stt.ts +1 -1
- package/src/tts/tts.ts +2 -2
package/dist/llm/fallback_adapter.d.ts
@@ -0,0 +1,73 @@
+import { type APIConnectOptions } from '../types.js';
+import type { ChatContext } from './chat_context.js';
+import { LLM, LLMStream } from './llm.js';
+import type { ToolChoice, ToolContext } from './tool_context.js';
+/**
+ * Internal status tracking for each LLM instance.
+ */
+interface LLMStatus {
+    available: boolean;
+    recoveringTask: Promise<void> | null;
+}
+/**
+ * Event emitted when an LLM's availability changes.
+ */
+export interface AvailabilityChangedEvent {
+    llm: LLM;
+    available: boolean;
+}
+/**
+ * Options for creating a FallbackAdapter.
+ */
+export interface FallbackAdapterOptions {
+    /** List of LLM instances to fallback to (in order). */
+    llms: LLM[];
+    /** Timeout for each LLM attempt in seconds. Defaults to 5.0. */
+    attemptTimeout?: number;
+    /** Internal retries per LLM before moving to next. Defaults to 0. */
+    maxRetryPerLLM?: number;
+    /** Interval between retries in seconds. Defaults to 0.5. */
+    retryInterval?: number;
+    /** Whether to retry when LLM fails after chunks are sent. Defaults to false. */
+    retryOnChunkSent?: boolean;
+}
+/**
+ * FallbackAdapter is an LLM that can fallback to a different LLM if the current LLM fails.
+ *
+ * @example
+ * ```typescript
+ * const fallbackLLM = new FallbackAdapter({
+ *   llms: [primaryLLM, secondaryLLM, tertiaryLLM],
+ *   attemptTimeout: 5.0,
+ *   maxRetryPerLLM: 1,
+ * });
+ * ```
+ */
+export declare class FallbackAdapter extends LLM {
+    readonly llms: LLM[];
+    readonly attemptTimeout: number;
+    readonly maxRetryPerLLM: number;
+    readonly retryInterval: number;
+    readonly retryOnChunkSent: boolean;
+    /** @internal */
+    _status: LLMStatus[];
+    private logger;
+    constructor(options: FallbackAdapterOptions);
+    get model(): string;
+    label(): string;
+    chat(opts: {
+        chatCtx: ChatContext;
+        toolCtx?: ToolContext;
+        connOptions?: APIConnectOptions;
+        parallelToolCalls?: boolean;
+        toolChoice?: ToolChoice;
+        extraKwargs?: Record<string, unknown>;
+    }): LLMStream;
+    /**
+     * Emit availability changed event.
+     * @internal
+     */
+    _emitAvailabilityChanged(llm: LLM, available: boolean): void;
+}
+export {};
+//# sourceMappingURL=fallback_adapter.d.ts.map
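The declaration above is the whole public surface: `FallbackAdapter` is itself an `LLM`, so it can be dropped in wherever a single model is configured today. A minimal usage sketch, assuming `primaryLLM`/`backupLLM` are any two concrete `LLM` implementations and `chatCtx` is an existing `ChatContext` (all three are hypothetical placeholders):

```typescript
import type { ChatContext } from './chat_context.js';
import { FallbackAdapter } from './fallback_adapter.js';
import type { LLM } from './llm.js';

// Hypothetical inputs: any two concrete LLM instances and a chat context.
declare const primaryLLM: LLM;
declare const backupLLM: LLM;
declare const chatCtx: ChatContext;

const llm = new FallbackAdapter({
  llms: [primaryLLM, backupLLM], // tried in order
  attemptTimeout: 5.0, // seconds per attempt
  maxRetryPerLLM: 1, // internal retries before moving to the next LLM
});

// chat() has the same shape as on any other LLM, so existing call sites work.
const stream = llm.chat({ chatCtx });
for await (const chunk of stream) {
  process.stdout.write(chunk.delta?.content ?? '');
}
```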
package/dist/llm/fallback_adapter.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"fallback_adapter.d.ts","sourceRoot":"","sources":["../../src/llm/fallback_adapter.ts"],"names":[],"mappings":"AAKA,OAAO,EAAE,KAAK,iBAAiB,EAA+B,MAAM,aAAa,CAAC;AAClF,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,mBAAmB,CAAC;AAErD,OAAO,EAAE,GAAG,EAAE,SAAS,EAAE,MAAM,UAAU,CAAC;AAC1C,OAAO,KAAK,EAAE,UAAU,EAAE,WAAW,EAAE,MAAM,mBAAmB,CAAC;AAYjE;;GAEG;AACH,UAAU,SAAS;IACjB,SAAS,EAAE,OAAO,CAAC;IACnB,cAAc,EAAE,OAAO,CAAC,IAAI,CAAC,GAAG,IAAI,CAAC;CACtC;AAED;;GAEG;AACH,MAAM,WAAW,wBAAwB;IACvC,GAAG,EAAE,GAAG,CAAC;IACT,SAAS,EAAE,OAAO,CAAC;CACpB;AAED;;GAEG;AACH,MAAM,WAAW,sBAAsB;IACrC,uDAAuD;IACvD,IAAI,EAAE,GAAG,EAAE,CAAC;IACZ,gEAAgE;IAChE,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB,qEAAqE;IACrE,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB,4DAA4D;IAC5D,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB,gFAAgF;IAChF,gBAAgB,CAAC,EAAE,OAAO,CAAC;CAC5B;AAED;;;;;;;;;;;GAWG;AACH,qBAAa,eAAgB,SAAQ,GAAG;IACtC,QAAQ,CAAC,IAAI,EAAE,GAAG,EAAE,CAAC;IACrB,QAAQ,CAAC,cAAc,EAAE,MAAM,CAAC;IAChC,QAAQ,CAAC,cAAc,EAAE,MAAM,CAAC;IAChC,QAAQ,CAAC,aAAa,EAAE,MAAM,CAAC;IAC/B,QAAQ,CAAC,gBAAgB,EAAE,OAAO,CAAC;IAEnC,gBAAgB;IAChB,OAAO,EAAE,SAAS,EAAE,CAAC;IAErB,OAAO,CAAC,MAAM,CAAS;gBAEX,OAAO,EAAE,sBAAsB;IA2B3C,IAAI,KAAK,IAAI,MAAM,CAElB;IAED,KAAK,IAAI,MAAM;IAIf,IAAI,CAAC,IAAI,EAAE;QACT,OAAO,EAAE,WAAW,CAAC;QACrB,OAAO,CAAC,EAAE,WAAW,CAAC;QACtB,WAAW,CAAC,EAAE,iBAAiB,CAAC;QAChC,iBAAiB,CAAC,EAAE,OAAO,CAAC;QAC5B,UAAU,CAAC,EAAE,UAAU,CAAC;QACxB,WAAW,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;KACvC,GAAG,SAAS;IAWb;;;OAGG;IACH,wBAAwB,CAAC,GAAG,EAAE,GAAG,EAAE,SAAS,EAAE,OAAO,GAAG,IAAI;CAQ7D"}
package/dist/llm/fallback_adapter.js
@@ -0,0 +1,254 @@
+import { APIConnectionError, APIError } from "../_exceptions.js";
+import { log } from "../log.js";
+import { DEFAULT_API_CONNECT_OPTIONS } from "../types.js";
+import { LLM, LLMStream } from "./llm.js";
+const DEFAULT_FALLBACK_API_CONNECT_OPTIONS = {
+  maxRetry: 0,
+  timeoutMs: DEFAULT_API_CONNECT_OPTIONS.timeoutMs,
+  retryIntervalMs: DEFAULT_API_CONNECT_OPTIONS.retryIntervalMs
+};
+class FallbackAdapter extends LLM {
+  llms;
+  attemptTimeout;
+  maxRetryPerLLM;
+  retryInterval;
+  retryOnChunkSent;
+  /** @internal */
+  _status;
+  logger = log();
+  constructor(options) {
+    super();
+    if (!options.llms || options.llms.length < 1) {
+      throw new Error("at least one LLM instance must be provided.");
+    }
+    this.llms = options.llms;
+    this.attemptTimeout = options.attemptTimeout ?? 5;
+    this.maxRetryPerLLM = options.maxRetryPerLLM ?? 0;
+    this.retryInterval = options.retryInterval ?? 0.5;
+    this.retryOnChunkSent = options.retryOnChunkSent ?? false;
+    this._status = this.llms.map(() => ({
+      available: true,
+      recoveringTask: null
+    }));
+    for (const llm of this.llms) {
+      llm.on("metrics_collected", (metrics) => {
+        this.emit("metrics_collected", metrics);
+      });
+    }
+  }
+  get model() {
+    return "FallbackAdapter";
+  }
+  label() {
+    return "FallbackAdapter";
+  }
+  chat(opts) {
+    return new FallbackLLMStream(this, {
+      chatCtx: opts.chatCtx,
+      toolCtx: opts.toolCtx,
+      connOptions: opts.connOptions || DEFAULT_FALLBACK_API_CONNECT_OPTIONS,
+      parallelToolCalls: opts.parallelToolCalls,
+      toolChoice: opts.toolChoice,
+      extraKwargs: opts.extraKwargs
+    });
+  }
+  /**
+   * Emit availability changed event.
+   * @internal
+   */
+  _emitAvailabilityChanged(llm, available) {
+    const event = { llm, available };
+    this.emit(
+      "llm_availability_changed",
+      event
+    );
+  }
+}
+class FallbackLLMStream extends LLMStream {
+  adapter;
+  parallelToolCalls;
+  toolChoice;
+  extraKwargs;
+  _currentStream;
+  _log = log();
+  constructor(adapter, opts) {
+    super(adapter, {
+      chatCtx: opts.chatCtx,
+      toolCtx: opts.toolCtx,
+      connOptions: opts.connOptions
+    });
+    this.adapter = adapter;
+    this.parallelToolCalls = opts.parallelToolCalls;
+    this.toolChoice = opts.toolChoice;
+    this.extraKwargs = opts.extraKwargs;
+  }
+  /**
+   * Override chatCtx to return current stream's context if available.
+   */
+  get chatCtx() {
+    var _a;
+    return ((_a = this._currentStream) == null ? void 0 : _a.chatCtx) ?? super.chatCtx;
+  }
+  /**
+   * Try to generate with a single LLM.
+   * Returns an async generator that yields chunks.
+   */
+  async *tryGenerate(llm, checkRecovery = false) {
+    const connOptions = {
+      ...this.connOptions,
+      maxRetry: this.adapter.maxRetryPerLLM,
+      timeoutMs: this.adapter.attemptTimeout * 1e3,
+      retryIntervalMs: this.adapter.retryInterval * 1e3
+    };
+    const stream = llm.chat({
+      chatCtx: super.chatCtx,
+      toolCtx: this.toolCtx,
+      connOptions,
+      parallelToolCalls: this.parallelToolCalls,
+      toolChoice: this.toolChoice,
+      extraKwargs: this.extraKwargs
+    });
+    let streamError;
+    const errorHandler = (ev) => {
+      streamError = ev.error;
+    };
+    llm.on("error", errorHandler);
+    try {
+      let shouldSetCurrent = !checkRecovery;
+      for await (const chunk of stream) {
+        if (shouldSetCurrent) {
+          shouldSetCurrent = false;
+          this._currentStream = stream;
+        }
+        yield chunk;
+      }
+      if (streamError) {
+        throw streamError;
+      }
+    } catch (error) {
+      if (error instanceof APIError) {
+        if (checkRecovery) {
+          this._log.warn({ llm: llm.label(), error }, "recovery failed");
+        } else {
+          this._log.warn({ llm: llm.label(), error }, "failed, switching to next LLM");
+        }
+        throw error;
+      }
+      if (error instanceof Error && error.name === "AbortError") {
+        if (checkRecovery) {
+          this._log.warn({ llm: llm.label() }, "recovery timed out");
+        } else {
+          this._log.warn({ llm: llm.label() }, "timed out, switching to next LLM");
+        }
+        throw error;
+      }
+      if (checkRecovery) {
+        this._log.error({ llm: llm.label(), error }, "recovery unexpected error");
+      } else {
+        this._log.error({ llm: llm.label(), error }, "unexpected error, switching to next LLM");
+      }
+      throw error;
+    } finally {
+      llm.off("error", errorHandler);
+    }
+  }
+  /**
+   * Start background recovery task for an LLM.
+   */
+  tryRecovery(llm, index) {
+    const status = this.adapter._status[index];
+    if (status.recoveringTask !== null) {
+      return;
+    }
+    const recoverTask = async () => {
+      try {
+        for await (const _chunk of this.tryGenerate(llm, true)) {
+        }
+        status.available = true;
+        this._log.info({ llm: llm.label() }, "LLM recovered");
+        this.adapter._emitAvailabilityChanged(llm, true);
+      } catch {
+      } finally {
+        status.recoveringTask = null;
+      }
+    };
+    status.recoveringTask = recoverTask();
+  }
+  /**
+   * Main run method - iterates through LLMs with fallback logic.
+   */
+  async run() {
+    const startTime = Date.now();
+    const allFailed = this.adapter._status.every((s) => !s.available);
+    if (allFailed) {
+      this._log.error("all LLMs are unavailable, retrying...");
+    }
+    for (let i = 0; i < this.adapter.llms.length; i++) {
+      const llm = this.adapter.llms[i];
+      const status = this.adapter._status[i];
+      this._log.debug(
+        { llm: llm.label(), index: i, available: status.available, allFailed },
+        "checking LLM"
+      );
+      if (status.available || allFailed) {
+        let textSent = "";
+        const toolCallsSent = [];
+        try {
+          this._log.info({ llm: llm.label() }, "FallbackAdapter: Attempting provider");
+          let chunkCount = 0;
+          for await (const chunk of this.tryGenerate(llm, false)) {
+            chunkCount++;
+            if (chunk.delta) {
+              if (chunk.delta.content) {
+                textSent += chunk.delta.content;
+              }
+              if (chunk.delta.toolCalls) {
+                for (const tc of chunk.delta.toolCalls) {
+                  if (tc.name) {
+                    toolCallsSent.push(tc.name);
+                  }
+                }
+              }
+            }
+            this._log.debug({ llm: llm.label(), chunkCount }, "run: forwarding chunk to queue");
+            this.queue.put(chunk);
+          }
+          this._log.info(
+            { llm: llm.label(), totalChunks: chunkCount, textLength: textSent.length },
+            "FallbackAdapter: Provider succeeded"
+          );
+          return;
+        } catch (error) {
+          if (status.available) {
+            status.available = false;
+            this.adapter._emitAvailabilityChanged(llm, false);
+          }
+          if (textSent || toolCallsSent.length > 0) {
+            const extra = { textSent, toolCallsSent };
+            if (!this.adapter.retryOnChunkSent) {
+              this._log.error(
+                { llm: llm.label(), ...extra },
+                "failed after sending chunk, skip retrying. Set `retryOnChunkSent` to `true` to enable."
+              );
+              throw error;
+            }
+            this._log.warn(
+              { llm: llm.label(), ...extra },
+              "failed after sending chunk, retrying..."
+            );
+          }
+        }
+      }
+      this.tryRecovery(llm, i);
+    }
+    const duration = (Date.now() - startTime) / 1e3;
+    const labels = this.adapter.llms.map((l) => l.label()).join(", ");
+    throw new APIConnectionError({
+      message: `all LLMs failed (${labels}) after ${duration.toFixed(2)}s`
+    });
+  }
+}
+export {
  FallbackAdapter
};
+//# sourceMappingURL=fallback_adapter.js.map
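Two side channels in this implementation are worth noting: child `metrics_collected` events are re-emitted unchanged, and every availability flip goes through `_emitAvailabilityChanged`, including the flip back to `available: true` once the background `tryRecovery` probe succeeds. A monitoring sketch under those assumptions (the `adapter` constant is the hypothetical instance from the earlier sketch; the cast mirrors the package's own test, since `llm_availability_changed` is not part of the base `LLM` event map):

```typescript
import type { AvailabilityChangedEvent, FallbackAdapter } from './fallback_adapter.js';

declare const adapter: FallbackAdapter; // instance from the earlier sketch

(adapter as any).on('llm_availability_changed', (ev: AvailabilityChangedEvent) => {
  // Fires with available=false when a provider fails, and with
  // available=true once the recovery probe gets a full response again.
  console.log(`${ev.llm.label()} is now ${ev.available ? 'up' : 'down'}`);
});

// Metrics from whichever child LLM served the request are forwarded as-is,
// so existing metrics collection keeps working behind the adapter.
adapter.on('metrics_collected', (metrics) => {
  console.log('llm metrics', metrics);
});
```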
package/dist/llm/fallback_adapter.js.map
@@ -0,0 +1 @@
{"version":3,"sources":["../../src/llm/fallback_adapter.ts"],"sourcesContent":["// SPDX-FileCopyrightText: 2024 LiveKit, Inc.\n//\n// SPDX-License-Identifier: Apache-2.0\nimport { APIConnectionError, APIError } from '../_exceptions.js';\nimport { log } from '../log.js';\nimport { type APIConnectOptions, DEFAULT_API_CONNECT_OPTIONS } from '../types.js';\nimport type { ChatContext } from './chat_context.js';\nimport type { ChatChunk } from './llm.js';\nimport { LLM, LLMStream } from './llm.js';\nimport type { ToolChoice, ToolContext } from './tool_context.js';\n\n/**\n * Default connection options for FallbackAdapter.\n * Uses max_retry=0 since fallback handles retries at a higher level.\n */\nconst DEFAULT_FALLBACK_API_CONNECT_OPTIONS: APIConnectOptions = {\n maxRetry: 0,\n timeoutMs: DEFAULT_API_CONNECT_OPTIONS.timeoutMs,\n retryIntervalMs: DEFAULT_API_CONNECT_OPTIONS.retryIntervalMs,\n};\n\n/**\n * Internal status tracking for each LLM instance.\n */\ninterface LLMStatus {\n available: boolean;\n recoveringTask: Promise<void> | null;\n}\n\n/**\n * Event emitted when an LLM's availability changes.\n */\nexport interface AvailabilityChangedEvent {\n llm: LLM;\n available: boolean;\n}\n\n/**\n * Options for creating a FallbackAdapter.\n */\nexport interface FallbackAdapterOptions {\n /** List of LLM instances to fallback to (in order). */\n llms: LLM[];\n /** Timeout for each LLM attempt in seconds. Defaults to 5.0. */\n attemptTimeout?: number;\n /** Internal retries per LLM before moving to next. Defaults to 0. */\n maxRetryPerLLM?: number;\n /** Interval between retries in seconds. Defaults to 0.5. */\n retryInterval?: number;\n /** Whether to retry when LLM fails after chunks are sent. Defaults to false. */\n retryOnChunkSent?: boolean;\n}\n\n/**\n * FallbackAdapter is an LLM that can fallback to a different LLM if the current LLM fails.\n *\n * @example\n * ```typescript\n * const fallbackLLM = new FallbackAdapter({\n * llms: [primaryLLM, secondaryLLM, tertiaryLLM],\n * attemptTimeout: 5.0,\n * maxRetryPerLLM: 1,\n * });\n * ```\n */\nexport class FallbackAdapter extends LLM {\n readonly llms: LLM[];\n readonly attemptTimeout: number;\n readonly maxRetryPerLLM: number;\n readonly retryInterval: number;\n readonly retryOnChunkSent: boolean;\n\n /** @internal */\n _status: LLMStatus[];\n\n private logger = log();\n\n constructor(options: FallbackAdapterOptions) {\n super();\n\n if (!options.llms || options.llms.length < 1) {\n throw new Error('at least one LLM instance must be provided.');\n }\n\n this.llms = options.llms;\n this.attemptTimeout = options.attemptTimeout ?? 5.0;\n this.maxRetryPerLLM = options.maxRetryPerLLM ?? 0;\n this.retryInterval = options.retryInterval ?? 0.5;\n this.retryOnChunkSent = options.retryOnChunkSent ?? 
false;\n\n // Initialize status for each LLM\n this._status = this.llms.map(() => ({\n available: true,\n recoveringTask: null,\n }));\n\n // Forward metrics_collected events from child LLMs\n for (const llm of this.llms) {\n llm.on('metrics_collected', (metrics) => {\n this.emit('metrics_collected', metrics);\n });\n }\n }\n\n get model(): string {\n return 'FallbackAdapter';\n }\n\n label(): string {\n return 'FallbackAdapter';\n }\n\n chat(opts: {\n chatCtx: ChatContext;\n toolCtx?: ToolContext;\n connOptions?: APIConnectOptions;\n parallelToolCalls?: boolean;\n toolChoice?: ToolChoice;\n extraKwargs?: Record<string, unknown>;\n }): LLMStream {\n return new FallbackLLMStream(this, {\n chatCtx: opts.chatCtx,\n toolCtx: opts.toolCtx,\n connOptions: opts.connOptions || DEFAULT_FALLBACK_API_CONNECT_OPTIONS,\n parallelToolCalls: opts.parallelToolCalls,\n toolChoice: opts.toolChoice,\n extraKwargs: opts.extraKwargs,\n });\n }\n\n /**\n * Emit availability changed event.\n * @internal\n */\n _emitAvailabilityChanged(llm: LLM, available: boolean): void {\n const event: AvailabilityChangedEvent = { llm, available };\n // Use type assertion for custom event\n (this as unknown as { emit: (event: string, data: AvailabilityChangedEvent) => void }).emit(\n 'llm_availability_changed',\n event,\n );\n }\n}\n\n/**\n * LLMStream implementation for FallbackAdapter.\n * Handles fallback logic between multiple LLM providers.\n */\nclass FallbackLLMStream extends LLMStream {\n private adapter: FallbackAdapter;\n private parallelToolCalls?: boolean;\n private toolChoice?: ToolChoice;\n private extraKwargs?: Record<string, unknown>;\n private _currentStream?: LLMStream;\n private _log = log();\n\n constructor(\n adapter: FallbackAdapter,\n opts: {\n chatCtx: ChatContext;\n toolCtx?: ToolContext;\n connOptions: APIConnectOptions;\n parallelToolCalls?: boolean;\n toolChoice?: ToolChoice;\n extraKwargs?: Record<string, unknown>;\n },\n ) {\n super(adapter, {\n chatCtx: opts.chatCtx,\n toolCtx: opts.toolCtx,\n connOptions: opts.connOptions,\n });\n this.adapter = adapter;\n this.parallelToolCalls = opts.parallelToolCalls;\n this.toolChoice = opts.toolChoice;\n this.extraKwargs = opts.extraKwargs;\n }\n\n /**\n * Override chatCtx to return current stream's context if available.\n */\n override get chatCtx(): ChatContext {\n return this._currentStream?.chatCtx ?? 
super.chatCtx;\n }\n\n /**\n * Try to generate with a single LLM.\n * Returns an async generator that yields chunks.\n */\n private async *tryGenerate(\n llm: LLM,\n checkRecovery: boolean = false,\n ): AsyncGenerator<ChatChunk, void, unknown> {\n const connOptions: APIConnectOptions = {\n ...this.connOptions,\n maxRetry: this.adapter.maxRetryPerLLM,\n timeoutMs: this.adapter.attemptTimeout * 1000,\n retryIntervalMs: this.adapter.retryInterval * 1000,\n };\n\n const stream = llm.chat({\n chatCtx: super.chatCtx,\n toolCtx: this.toolCtx,\n connOptions,\n parallelToolCalls: this.parallelToolCalls,\n toolChoice: this.toolChoice,\n extraKwargs: this.extraKwargs,\n });\n\n // Listen for error events - child LLMs emit errors via their LLM instance, not the stream\n let streamError: Error | undefined;\n const errorHandler = (ev: { error: Error }) => {\n streamError = ev.error;\n };\n llm.on('error', errorHandler);\n\n try {\n let shouldSetCurrent = !checkRecovery;\n for await (const chunk of stream) {\n if (shouldSetCurrent) {\n shouldSetCurrent = false;\n this._currentStream = stream;\n }\n yield chunk;\n }\n\n // If an error was emitted but not thrown through iteration, throw it now\n if (streamError) {\n throw streamError;\n }\n } catch (error) {\n if (error instanceof APIError) {\n if (checkRecovery) {\n this._log.warn({ llm: llm.label(), error }, 'recovery failed');\n } else {\n this._log.warn({ llm: llm.label(), error }, 'failed, switching to next LLM');\n }\n throw error;\n }\n\n // Handle timeout errors\n if (error instanceof Error && error.name === 'AbortError') {\n if (checkRecovery) {\n this._log.warn({ llm: llm.label() }, 'recovery timed out');\n } else {\n this._log.warn({ llm: llm.label() }, 'timed out, switching to next LLM');\n }\n throw error;\n }\n\n // Unexpected error\n if (checkRecovery) {\n this._log.error({ llm: llm.label(), error }, 'recovery unexpected error');\n } else {\n this._log.error({ llm: llm.label(), error }, 'unexpected error, switching to next LLM');\n }\n throw error;\n } finally {\n llm.off('error', errorHandler);\n }\n }\n\n /**\n * Start background recovery task for an LLM.\n */\n private tryRecovery(llm: LLM, index: number): void {\n const status = this.adapter._status[index]!;\n\n // Skip if already recovering\n if (status.recoveringTask !== null) {\n return;\n }\n\n const recoverTask = async (): Promise<void> => {\n try {\n // Try to generate (just iterate to check if it works)\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n for await (const _chunk of this.tryGenerate(llm, true)) {\n // Just consume the stream to verify it works\n }\n\n // Recovery successful\n status.available = true;\n this._log.info({ llm: llm.label() }, 'LLM recovered');\n this.adapter._emitAvailabilityChanged(llm, true);\n } catch {\n // Recovery failed, stay unavailable\n } finally {\n status.recoveringTask = null;\n }\n };\n\n // Fire and forget\n status.recoveringTask = recoverTask();\n }\n\n /**\n * Main run method - iterates through LLMs with fallback logic.\n */\n protected async run(): Promise<void> {\n const startTime = Date.now();\n\n // Check if all LLMs are unavailable\n const allFailed = this.adapter._status.every((s) => !s.available);\n if (allFailed) {\n this._log.error('all LLMs are unavailable, retrying...');\n }\n\n for (let i = 0; i < this.adapter.llms.length; i++) {\n const llm = this.adapter.llms[i]!;\n const status = this.adapter._status[i]!;\n\n this._log.debug(\n { llm: llm.label(), index: i, available: status.available, allFailed },\n 
'checking LLM',\n );\n\n if (status.available || allFailed) {\n let textSent = '';\n const toolCallsSent: string[] = [];\n\n try {\n this._log.info({ llm: llm.label() }, 'FallbackAdapter: Attempting provider');\n\n let chunkCount = 0;\n for await (const chunk of this.tryGenerate(llm, false)) {\n chunkCount++;\n // Track what's been sent\n if (chunk.delta) {\n if (chunk.delta.content) {\n textSent += chunk.delta.content;\n }\n if (chunk.delta.toolCalls) {\n for (const tc of chunk.delta.toolCalls) {\n if (tc.name) {\n toolCallsSent.push(tc.name);\n }\n }\n }\n }\n\n // Forward chunk to queue\n this._log.debug({ llm: llm.label(), chunkCount }, 'run: forwarding chunk to queue');\n this.queue.put(chunk);\n }\n\n // Success!\n this._log.info(\n { llm: llm.label(), totalChunks: chunkCount, textLength: textSent.length },\n 'FallbackAdapter: Provider succeeded',\n );\n return;\n } catch (error) {\n // Mark as unavailable if it was available before\n if (status.available) {\n status.available = false;\n this.adapter._emitAvailabilityChanged(llm, false);\n }\n\n // Check if we sent data before failing\n if (textSent || toolCallsSent.length > 0) {\n const extra = { textSent, toolCallsSent };\n\n if (!this.adapter.retryOnChunkSent) {\n this._log.error(\n { llm: llm.label(), ...extra },\n 'failed after sending chunk, skip retrying. Set `retryOnChunkSent` to `true` to enable.',\n );\n throw error;\n }\n\n this._log.warn(\n { llm: llm.label(), ...extra },\n 'failed after sending chunk, retrying...',\n );\n }\n }\n }\n\n // Trigger background recovery for this LLM\n this.tryRecovery(llm, i);\n }\n\n // All LLMs failed\n const duration = (Date.now() - startTime) / 1000;\n const labels = this.adapter.llms.map((l) => l.label()).join(', ');\n throw new APIConnectionError({\n message: `all LLMs failed (${labels}) after ${duration.toFixed(2)}s`,\n });\n 
}\n}\n"],"mappings":"AAGA,SAAS,oBAAoB,gBAAgB;AAC7C,SAAS,WAAW;AACpB,SAAiC,mCAAmC;AAGpE,SAAS,KAAK,iBAAiB;AAO/B,MAAM,uCAA0D;AAAA,EAC9D,UAAU;AAAA,EACV,WAAW,4BAA4B;AAAA,EACvC,iBAAiB,4BAA4B;AAC/C;AA8CO,MAAM,wBAAwB,IAAI;AAAA,EAC9B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAGT;AAAA,EAEQ,SAAS,IAAI;AAAA,EAErB,YAAY,SAAiC;AAC3C,UAAM;AAEN,QAAI,CAAC,QAAQ,QAAQ,QAAQ,KAAK,SAAS,GAAG;AAC5C,YAAM,IAAI,MAAM,6CAA6C;AAAA,IAC/D;AAEA,SAAK,OAAO,QAAQ;AACpB,SAAK,iBAAiB,QAAQ,kBAAkB;AAChD,SAAK,iBAAiB,QAAQ,kBAAkB;AAChD,SAAK,gBAAgB,QAAQ,iBAAiB;AAC9C,SAAK,mBAAmB,QAAQ,oBAAoB;AAGpD,SAAK,UAAU,KAAK,KAAK,IAAI,OAAO;AAAA,MAClC,WAAW;AAAA,MACX,gBAAgB;AAAA,IAClB,EAAE;AAGF,eAAW,OAAO,KAAK,MAAM;AAC3B,UAAI,GAAG,qBAAqB,CAAC,YAAY;AACvC,aAAK,KAAK,qBAAqB,OAAO;AAAA,MACxC,CAAC;AAAA,IACH;AAAA,EACF;AAAA,EAEA,IAAI,QAAgB;AAClB,WAAO;AAAA,EACT;AAAA,EAEA,QAAgB;AACd,WAAO;AAAA,EACT;AAAA,EAEA,KAAK,MAOS;AACZ,WAAO,IAAI,kBAAkB,MAAM;AAAA,MACjC,SAAS,KAAK;AAAA,MACd,SAAS,KAAK;AAAA,MACd,aAAa,KAAK,eAAe;AAAA,MACjC,mBAAmB,KAAK;AAAA,MACxB,YAAY,KAAK;AAAA,MACjB,aAAa,KAAK;AAAA,IACpB,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,yBAAyB,KAAU,WAA0B;AAC3D,UAAM,QAAkC,EAAE,KAAK,UAAU;AAEzD,IAAC,KAAsF;AAAA,MACrF;AAAA,MACA;AAAA,IACF;AAAA,EACF;AACF;AAMA,MAAM,0BAA0B,UAAU;AAAA,EAChC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA,OAAO,IAAI;AAAA,EAEnB,YACE,SACA,MAQA;AACA,UAAM,SAAS;AAAA,MACb,SAAS,KAAK;AAAA,MACd,SAAS,KAAK;AAAA,MACd,aAAa,KAAK;AAAA,IACpB,CAAC;AACD,SAAK,UAAU;AACf,SAAK,oBAAoB,KAAK;AAC9B,SAAK,aAAa,KAAK;AACvB,SAAK,cAAc,KAAK;AAAA,EAC1B;AAAA;AAAA;AAAA;AAAA,EAKA,IAAa,UAAuB;AArLtC;AAsLI,aAAO,UAAK,mBAAL,mBAAqB,YAAW,MAAM;AAAA,EAC/C;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,OAAe,YACb,KACA,gBAAyB,OACiB;AAC1C,UAAM,cAAiC;AAAA,MACrC,GAAG,KAAK;AAAA,MACR,UAAU,KAAK,QAAQ;AAAA,MACvB,WAAW,KAAK,QAAQ,iBAAiB;AAAA,MACzC,iBAAiB,KAAK,QAAQ,gBAAgB;AAAA,IAChD;AAEA,UAAM,SAAS,IAAI,KAAK;AAAA,MACtB,SAAS,MAAM;AAAA,MACf,SAAS,KAAK;AAAA,MACd;AAAA,MACA,mBAAmB,KAAK;AAAA,MACxB,YAAY,KAAK;AAAA,MACjB,aAAa,KAAK;AAAA,IACpB,CAAC;AAGD,QAAI;AACJ,UAAM,eAAe,CAAC,OAAyB;AAC7C,oBAAc,GAAG;AAAA,IACnB;AACA,QAAI,GAAG,SAAS,YAAY;AAE5B,QAAI;AACF,UAAI,mBAAmB,CAAC;AACxB,uBAAiB,SAAS,QAAQ;AAChC,YAAI,kBAAkB;AACpB,6BAAmB;AACnB,eAAK,iBAAiB;AAAA,QACxB;AACA,cAAM;AAAA,MACR;AAGA,UAAI,aAAa;AACf,cAAM;AAAA,MACR;AAAA,IACF,SAAS,OAAO;AACd,UAAI,iBAAiB,UAAU;AAC7B,YAAI,eAAe;AACjB,eAAK,KAAK,KAAK,EAAE,KAAK,IAAI,MAAM,GAAG,MAAM,GAAG,iBAAiB;AAAA,QAC/D,OAAO;AACL,eAAK,KAAK,KAAK,EAAE,KAAK,IAAI,MAAM,GAAG,MAAM,GAAG,+BAA+B;AAAA,QAC7E;AACA,cAAM;AAAA,MACR;AAGA,UAAI,iBAAiB,SAAS,MAAM,SAAS,cAAc;AACzD,YAAI,eAAe;AACjB,eAAK,KAAK,KAAK,EAAE,KAAK,IAAI,MAAM,EAAE,GAAG,oBAAoB;AAAA,QAC3D,OAAO;AACL,eAAK,KAAK,KAAK,EAAE,KAAK,IAAI,MAAM,EAAE,GAAG,kCAAkC;AAAA,QACzE;AACA,cAAM;AAAA,MACR;AAGA,UAAI,eAAe;AACjB,aAAK,KAAK,MAAM,EAAE,KAAK,IAAI,MAAM,GAAG,MAAM,GAAG,2BAA2B;AAAA,MAC1E,OAAO;AACL,aAAK,KAAK,MAAM,EAAE,KAAK,IAAI,MAAM,GAAG,MAAM,GAAG,yCAAyC;AAAA,MACxF;AACA,YAAM;AAAA,IACR,UAAE;AACA,UAAI,IAAI,SAAS,YAAY;AAAA,IAC/B;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,YAAY,KAAU,OAAqB;AACjD,UAAM,SAAS,KAAK,QAAQ,QAAQ,KAAK;AAGzC,QAAI,OAAO,mBAAmB,MAAM;AAClC;AAAA,IACF;AAEA,UAAM,cAAc,YAA2B;AAC7C,UAAI;AAGF,yBAAiB,UAAU,KAAK,YAAY,KAAK,IAAI,GAAG;AAAA,QAExD;AAGA,eAAO,YAAY;AACnB,aAAK,KAAK,KAAK,EAAE,KAAK,IAAI,MAAM,EAAE,GAAG,eAAe;AACpD,aAAK,QAAQ,yBAAyB,KAAK,IAAI;AAAA,MACjD,QAAQ;AAAA,MAER,UAAE;AACA,eAAO,iBAAiB;AAAA,MAC1B;AAAA,IACF;AAGA,WAAO,iBAAiB,YAAY;AAAA,EACtC;AAAA;AAAA;AAAA;AAAA,EAKA,MAAgB,MAAqB;AACnC,UAAM,YAAY,KAAK,IAAI;AAG3B,UAAM,YAAY,KAAK,QAAQ,QAAQ,MAAM,CAAC,MAAM,CAAC,EAAE,SAAS;AAChE,QAAI,WAAW;AACb,WAAK,KAAK,MAAM,uCAAuC;AAAA,IACzD;AAEA,aAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK,QAAQ,KAAK;AACjD,YAAM,MAAM,KAAK,QAAQ,KAAK,CAAC
;AAC/B,YAAM,SAAS,KAAK,QAAQ,QAAQ,CAAC;AAErC,WAAK,KAAK;AAAA,QACR,EAAE,KAAK,IAAI,MAAM,GAAG,OAAO,GAAG,WAAW,OAAO,WAAW,UAAU;AAAA,QACrE;AAAA,MACF;AAEA,UAAI,OAAO,aAAa,WAAW;AACjC,YAAI,WAAW;AACf,cAAM,gBAA0B,CAAC;AAEjC,YAAI;AACF,eAAK,KAAK,KAAK,EAAE,KAAK,IAAI,MAAM,EAAE,GAAG,sCAAsC;AAE3E,cAAI,aAAa;AACjB,2BAAiB,SAAS,KAAK,YAAY,KAAK,KAAK,GAAG;AACtD;AAEA,gBAAI,MAAM,OAAO;AACf,kBAAI,MAAM,MAAM,SAAS;AACvB,4BAAY,MAAM,MAAM;AAAA,cAC1B;AACA,kBAAI,MAAM,MAAM,WAAW;AACzB,2BAAW,MAAM,MAAM,MAAM,WAAW;AACtC,sBAAI,GAAG,MAAM;AACX,kCAAc,KAAK,GAAG,IAAI;AAAA,kBAC5B;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAGA,iBAAK,KAAK,MAAM,EAAE,KAAK,IAAI,MAAM,GAAG,WAAW,GAAG,gCAAgC;AAClF,iBAAK,MAAM,IAAI,KAAK;AAAA,UACtB;AAGA,eAAK,KAAK;AAAA,YACR,EAAE,KAAK,IAAI,MAAM,GAAG,aAAa,YAAY,YAAY,SAAS,OAAO;AAAA,YACzE;AAAA,UACF;AACA;AAAA,QACF,SAAS,OAAO;AAEd,cAAI,OAAO,WAAW;AACpB,mBAAO,YAAY;AACnB,iBAAK,QAAQ,yBAAyB,KAAK,KAAK;AAAA,UAClD;AAGA,cAAI,YAAY,cAAc,SAAS,GAAG;AACxC,kBAAM,QAAQ,EAAE,UAAU,cAAc;AAExC,gBAAI,CAAC,KAAK,QAAQ,kBAAkB;AAClC,mBAAK,KAAK;AAAA,gBACR,EAAE,KAAK,IAAI,MAAM,GAAG,GAAG,MAAM;AAAA,gBAC7B;AAAA,cACF;AACA,oBAAM;AAAA,YACR;AAEA,iBAAK,KAAK;AAAA,cACR,EAAE,KAAK,IAAI,MAAM,GAAG,GAAG,MAAM;AAAA,cAC7B;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAGA,WAAK,YAAY,KAAK,CAAC;AAAA,IACzB;AAGA,UAAM,YAAY,KAAK,IAAI,IAAI,aAAa;AAC5C,UAAM,SAAS,KAAK,QAAQ,KAAK,IAAI,CAAC,MAAM,EAAE,MAAM,CAAC,EAAE,KAAK,IAAI;AAChE,UAAM,IAAI,mBAAmB;AAAA,MAC3B,SAAS,oBAAoB,MAAM,WAAW,SAAS,QAAQ,CAAC,CAAC;AAAA,IACnE,CAAC;AAAA,EACH;AACF;","names":[]}
package/dist/llm/fallback_adapter.test.cjs
@@ -0,0 +1,176 @@
+"use strict";
+var import_vitest = require("vitest");
+var import_exceptions = require("../_exceptions.cjs");
+var import_log = require("../log.cjs");
+var import_utils = require("../utils.cjs");
+var import_fallback_adapter = require("./fallback_adapter.cjs");
+var import_llm = require("./llm.cjs");
+class MockLLMStream extends import_llm.LLMStream {
+  constructor(llm, opts, shouldFail = false, failAfterChunks = 0) {
+    super(llm, opts);
+    this.shouldFail = shouldFail;
+    this.failAfterChunks = failAfterChunks;
+    this.myLLM = llm;
+  }
+  myLLM;
+  async run() {
+    if (this.shouldFail && this.failAfterChunks === 0) {
+      throw new import_exceptions.APIError("Mock LLM failed immediately");
+    }
+    const chunk = {
+      id: "test-id",
+      delta: { role: "assistant", content: "chunk" }
+    };
+    for (let i = 0; i < 3; i++) {
+      if (this.shouldFail && i === this.failAfterChunks) {
+        throw new import_exceptions.APIError("Mock LLM failed after chunks");
+      }
+      this.queue.put(chunk);
+      await (0, import_utils.delay)(10);
+    }
+  }
+}
+class MockLLM extends import_llm.LLM {
+  shouldFail = false;
+  failAfterChunks = 0;
+  _label;
+  constructor(label) {
+    super();
+    this._label = label;
+  }
+  label() {
+    return this._label;
+  }
+  chat(opts) {
+    return new MockLLMStream(
+      this,
+      {
+        chatCtx: opts.chatCtx,
+        toolCtx: opts.toolCtx,
+        connOptions: opts.connOptions
+      },
+      this.shouldFail,
+      this.failAfterChunks
+    );
+  }
+}
+(0, import_vitest.describe)("FallbackAdapter", () => {
+  (0, import_vitest.beforeAll)(() => {
+    (0, import_log.initializeLogger)({ pretty: false });
+    process.on("unhandledRejection", () => {
+    });
+  });
+  (0, import_vitest.it)("should initialize correctly", () => {
+    const llm1 = new MockLLM("llm1");
+    const adapter = new import_fallback_adapter.FallbackAdapter({ llms: [llm1] });
+    (0, import_vitest.expect)(adapter.llms).toHaveLength(1);
+    (0, import_vitest.expect)(adapter.llms[0]).toBe(llm1);
+  });
+  (0, import_vitest.it)("should throw if no LLMs provided", () => {
+    (0, import_vitest.expect)(() => new import_fallback_adapter.FallbackAdapter({ llms: [] })).toThrow();
+  });
+  (0, import_vitest.it)("should use primary LLM if successful", async () => {
+    const llm1 = new MockLLM("llm1");
+    const llm2 = new MockLLM("llm2");
+    const adapter = new import_fallback_adapter.FallbackAdapter({ llms: [llm1, llm2] });
+    const stream = adapter.chat({
+      chatCtx: {}
+    });
+    const chunks = [];
+    for await (const chunk of stream) {
+      chunks.push(chunk);
+    }
+    (0, import_vitest.expect)(chunks).toHaveLength(3);
+  });
+  (0, import_vitest.it)("should fallback to second LLM if first fails immediately", async () => {
+    const llm1 = new MockLLM("llm1");
+    llm1.shouldFail = true;
+    const llm2 = new MockLLM("llm2");
+    const adapter = new import_fallback_adapter.FallbackAdapter({ llms: [llm1, llm2] });
+    const stream = adapter.chat({
+      chatCtx: {}
+    });
+    const chunks = [];
+    for await (const chunk of stream) {
+      chunks.push(chunk);
+    }
+    (0, import_vitest.expect)(chunks).toHaveLength(3);
+    (0, import_vitest.expect)(adapter._status[0].available).toBe(false);
+    (0, import_vitest.expect)(adapter._status[1].available).toBe(true);
+  });
+  (0, import_vitest.it)("should fail if all LLMs fail", async () => {
+    const llm1 = new MockLLM("llm1");
+    llm1.shouldFail = true;
+    const llm2 = new MockLLM("llm2");
+    llm2.shouldFail = true;
+    const adapter = new import_fallback_adapter.FallbackAdapter({ llms: [llm1, llm2] });
+    const stream = adapter.chat({
+      chatCtx: {}
+    });
+    const errorPromise = new Promise((resolve) => {
+      adapter.on("error", (e) => resolve(e.error));
+    });
+    for await (const _ of stream) {
+    }
+    const error = await errorPromise;
+    (0, import_vitest.expect)(error).toBeInstanceOf(import_exceptions.APIConnectionError);
+  });
+  (0, import_vitest.it)("should fail if chunks sent and retryOnChunkSent is false", async () => {
+    const llm1 = new MockLLM("llm1");
+    llm1.shouldFail = true;
+    llm1.failAfterChunks = 1;
+    const llm2 = new MockLLM("llm2");
+    const adapter = new import_fallback_adapter.FallbackAdapter({
+      llms: [llm1, llm2],
+      retryOnChunkSent: false
+    });
+    const stream = adapter.chat({
+      chatCtx: {}
+    });
+    const errorPromise = new Promise((resolve) => {
+      adapter.on("error", (e) => resolve(e.error));
+    });
+    for await (const _ of stream) {
+    }
+    const error = await errorPromise;
+    (0, import_vitest.expect)(error).toBeInstanceOf(import_exceptions.APIError);
+  });
+  (0, import_vitest.it)("should fallback if chunks sent and retryOnChunkSent is true", async () => {
+    const llm1 = new MockLLM("llm1");
+    llm1.shouldFail = true;
+    llm1.failAfterChunks = 1;
+    const llm2 = new MockLLM("llm2");
+    const adapter = new import_fallback_adapter.FallbackAdapter({
+      llms: [llm1, llm2],
+      retryOnChunkSent: true
+    });
+    const stream = adapter.chat({
+      chatCtx: {}
+    });
+    const chunks = [];
+    for await (const chunk of stream) {
+      chunks.push(chunk);
+    }
+    (0, import_vitest.expect)(chunks).toHaveLength(4);
+  });
+  (0, import_vitest.it)("should emit availability changed events", async () => {
+    const llm1 = new MockLLM("llm1");
+    llm1.shouldFail = true;
+    const llm2 = new MockLLM("llm2");
+    const adapter = new import_fallback_adapter.FallbackAdapter({ llms: [llm1, llm2] });
+    const eventSpy = import_vitest.vi.fn();
+    adapter.on("llm_availability_changed", eventSpy);
+    const stream = adapter.chat({
+      chatCtx: {}
+    });
+    for await (const _ of stream) {
+    }
+    (0, import_vitest.expect)(eventSpy).toHaveBeenCalledWith(
+      import_vitest.expect.objectContaining({
+        llm: llm1,
+        available: false
+      })
+    );
+  });
+});
+//# sourceMappingURL=fallback_adapter.test.cjs.map
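One pattern in these tests transfers directly to application code: when every provider fails, the `APIConnectionError` raised by `run()` surfaces through the adapter's `error` event rather than rejecting the `for await` loop, which simply ends. A handling sketch under that assumption, reusing the hypothetical `adapter` and `chatCtx` from the earlier sketches:

```typescript
import { APIConnectionError } from '../_exceptions.js';
import type { ChatContext } from './chat_context.js';
import type { FallbackAdapter } from './fallback_adapter.js';

declare const adapter: FallbackAdapter; // from the earlier sketch
declare const chatCtx: ChatContext;

adapter.on('error', (ev) => {
  if (ev.error instanceof APIConnectionError) {
    // Every LLM in the fallback chain failed or timed out for this request.
    console.error('all fallback LLMs failed:', ev.error.message);
  }
});

let text = '';
for await (const chunk of adapter.chat({ chatCtx })) {
  // Chunks come from the first provider that succeeds; note that with
  // retryOnChunkSent enabled, partial output from a failed attempt is
  // not rolled back (the tests above assert 1 + 3 = 4 chunks).
  text += chunk.delta?.content ?? '';
}
```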
package/dist/llm/fallback_adapter.test.cjs.map
@@ -0,0 +1 @@
{"version":3,"sources":["../../src/llm/fallback_adapter.test.ts"],"sourcesContent":["// SPDX-FileCopyrightText: 2024 LiveKit, Inc.\n//\n// SPDX-License-Identifier: Apache-2.0\nimport { beforeAll, describe, expect, it, vi } from 'vitest';\nimport { APIConnectionError, APIError } from '../_exceptions.js';\nimport { initializeLogger } from '../log.js';\nimport type { APIConnectOptions } from '../types.js';\nimport { delay } from '../utils.js';\nimport type { ChatContext } from './chat_context.js';\nimport { FallbackAdapter } from './fallback_adapter.js';\nimport { type ChatChunk, LLM, LLMStream } from './llm.js';\nimport type { ToolChoice, ToolContext } from './tool_context.js';\n\nclass MockLLMStream extends LLMStream {\n public myLLM: LLM;\n\n constructor(\n llm: LLM,\n opts: {\n chatCtx: ChatContext;\n toolCtx?: ToolContext;\n connOptions: APIConnectOptions;\n },\n private shouldFail: boolean = false,\n private failAfterChunks: number = 0,\n ) {\n super(llm, opts);\n this.myLLM = llm;\n }\n\n protected async run(): Promise<void> {\n if (this.shouldFail && this.failAfterChunks === 0) {\n throw new APIError('Mock LLM failed immediately');\n }\n\n const chunk: ChatChunk = {\n id: 'test-id',\n delta: { role: 'assistant', content: 'chunk' },\n };\n\n for (let i = 0; i < 3; i++) {\n if (this.shouldFail && i === this.failAfterChunks) {\n throw new APIError('Mock LLM failed after chunks');\n }\n this.queue.put(chunk);\n await delay(10);\n }\n }\n}\n\nclass MockLLM extends LLM {\n shouldFail: boolean = false;\n failAfterChunks: number = 0;\n private _label: string;\n\n constructor(label: string) {\n super();\n this._label = label;\n }\n\n label(): string {\n return this._label;\n }\n\n chat(opts: {\n chatCtx: ChatContext;\n toolCtx?: ToolContext;\n connOptions?: APIConnectOptions;\n parallelToolCalls?: boolean;\n toolChoice?: ToolChoice;\n extraKwargs?: Record<string, unknown>;\n }): LLMStream {\n return new MockLLMStream(\n this,\n {\n chatCtx: opts.chatCtx,\n toolCtx: opts.toolCtx,\n connOptions: opts.connOptions!,\n },\n this.shouldFail,\n this.failAfterChunks,\n );\n }\n}\n\ndescribe('FallbackAdapter', () => {\n beforeAll(() => {\n initializeLogger({ pretty: false });\n // Suppress unhandled rejections from LLMStream background tasks\n process.on('unhandledRejection', () => {});\n });\n\n it('should initialize correctly', () => {\n const llm1 = new MockLLM('llm1');\n const adapter = new FallbackAdapter({ llms: [llm1] });\n expect(adapter.llms).toHaveLength(1);\n expect(adapter.llms[0]).toBe(llm1);\n });\n\n it('should throw if no LLMs provided', () => {\n expect(() => new FallbackAdapter({ llms: [] })).toThrow();\n });\n\n it('should use primary LLM if successful', async () => {\n const llm1 = new MockLLM('llm1');\n const llm2 = new MockLLM('llm2');\n const adapter = new FallbackAdapter({ llms: [llm1, llm2] });\n\n const stream = adapter.chat({\n chatCtx: {} as ChatContext,\n });\n\n const chunks: ChatChunk[] = [];\n for await (const chunk of stream) {\n chunks.push(chunk);\n }\n\n expect(chunks).toHaveLength(3);\n // Should verify it used llm1 (we can check logs or spy, but simple success is good first step)\n });\n\n it('should fallback to second LLM if first fails immediately', async () => {\n const llm1 = new MockLLM('llm1');\n llm1.shouldFail = true;\n const llm2 = new MockLLM('llm2');\n const adapter = new FallbackAdapter({ llms: [llm1, llm2] });\n\n const stream = adapter.chat({\n chatCtx: {} as ChatContext,\n });\n\n const chunks: ChatChunk[] = [];\n for await (const chunk of stream) 
{\n chunks.push(chunk);\n }\n\n expect(chunks).toHaveLength(3);\n expect(adapter._status[0]!.available).toBe(false);\n expect(adapter._status[1]!.available).toBe(true);\n });\n\n it('should fail if all LLMs fail', async () => {\n const llm1 = new MockLLM('llm1');\n llm1.shouldFail = true;\n const llm2 = new MockLLM('llm2');\n llm2.shouldFail = true;\n const adapter = new FallbackAdapter({ llms: [llm1, llm2] });\n\n const stream = adapter.chat({\n chatCtx: {} as ChatContext,\n });\n\n const errorPromise = new Promise<Error>((resolve) => {\n adapter.on('error', (e) => resolve(e.error));\n });\n\n for await (const _ of stream) {\n // consume\n }\n\n const error = await errorPromise;\n expect(error).toBeInstanceOf(APIConnectionError);\n });\n\n it('should fail if chunks sent and retryOnChunkSent is false', async () => {\n const llm1 = new MockLLM('llm1');\n llm1.shouldFail = true;\n llm1.failAfterChunks = 1; // Fail after 1 chunk\n const llm2 = new MockLLM('llm2');\n const adapter = new FallbackAdapter({\n llms: [llm1, llm2],\n retryOnChunkSent: false,\n });\n\n const stream = adapter.chat({\n chatCtx: {} as ChatContext,\n });\n\n const errorPromise = new Promise<Error>((resolve) => {\n adapter.on('error', (e) => resolve(e.error));\n });\n\n for await (const _ of stream) {\n // consume\n }\n\n const error = await errorPromise;\n expect(error).toBeInstanceOf(APIError);\n });\n\n it('should fallback if chunks sent and retryOnChunkSent is true', async () => {\n const llm1 = new MockLLM('llm1');\n llm1.shouldFail = true;\n llm1.failAfterChunks = 1;\n const llm2 = new MockLLM('llm2');\n const adapter = new FallbackAdapter({\n llms: [llm1, llm2],\n retryOnChunkSent: true,\n });\n\n const stream = adapter.chat({\n chatCtx: {} as ChatContext,\n });\n\n const chunks: ChatChunk[] = [];\n for await (const chunk of stream) {\n chunks.push(chunk);\n }\n\n // 1 chunk from failed llm1 + 3 chunks from llm2\n expect(chunks).toHaveLength(4);\n });\n\n it('should emit availability changed events', async () => {\n const llm1 = new MockLLM('llm1');\n llm1.shouldFail = true;\n const llm2 = new MockLLM('llm2');\n const adapter = new FallbackAdapter({ llms: [llm1, llm2] });\n\n const eventSpy = vi.fn();\n (adapter as any).on('llm_availability_changed', eventSpy);\n\n const stream = adapter.chat({\n chatCtx: {} as ChatContext,\n });\n\n for await (const _ of stream) {\n // consume\n }\n\n expect(eventSpy).toHaveBeenCalledWith(\n expect.objectContaining({\n llm: llm1,\n available: false,\n }),\n );\n 
});\n});\n"],"mappings":";AAGA,oBAAoD;AACpD,wBAA6C;AAC7C,iBAAiC;AAEjC,mBAAsB;AAEtB,8BAAgC;AAChC,iBAA+C;AAG/C,MAAM,sBAAsB,qBAAU;AAAA,EAGpC,YACE,KACA,MAKQ,aAAsB,OACtB,kBAA0B,GAClC;AACA,UAAM,KAAK,IAAI;AAHP;AACA;AAGR,SAAK,QAAQ;AAAA,EACf;AAAA,EAdO;AAAA,EAgBP,MAAgB,MAAqB;AACnC,QAAI,KAAK,cAAc,KAAK,oBAAoB,GAAG;AACjD,YAAM,IAAI,2BAAS,6BAA6B;AAAA,IAClD;AAEA,UAAM,QAAmB;AAAA,MACvB,IAAI;AAAA,MACJ,OAAO,EAAE,MAAM,aAAa,SAAS,QAAQ;AAAA,IAC/C;AAEA,aAAS,IAAI,GAAG,IAAI,GAAG,KAAK;AAC1B,UAAI,KAAK,cAAc,MAAM,KAAK,iBAAiB;AACjD,cAAM,IAAI,2BAAS,8BAA8B;AAAA,MACnD;AACA,WAAK,MAAM,IAAI,KAAK;AACpB,gBAAM,oBAAM,EAAE;AAAA,IAChB;AAAA,EACF;AACF;AAEA,MAAM,gBAAgB,eAAI;AAAA,EACxB,aAAsB;AAAA,EACtB,kBAA0B;AAAA,EAClB;AAAA,EAER,YAAY,OAAe;AACzB,UAAM;AACN,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,QAAgB;AACd,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,KAAK,MAOS;AACZ,WAAO,IAAI;AAAA,MACT;AAAA,MACA;AAAA,QACE,SAAS,KAAK;AAAA,QACd,SAAS,KAAK;AAAA,QACd,aAAa,KAAK;AAAA,MACpB;AAAA,MACA,KAAK;AAAA,MACL,KAAK;AAAA,IACP;AAAA,EACF;AACF;AAAA,IAEA,wBAAS,mBAAmB,MAAM;AAChC,+BAAU,MAAM;AACd,qCAAiB,EAAE,QAAQ,MAAM,CAAC;AAElC,YAAQ,GAAG,sBAAsB,MAAM;AAAA,IAAC,CAAC;AAAA,EAC3C,CAAC;AAED,wBAAG,+BAA+B,MAAM;AACtC,UAAM,OAAO,IAAI,QAAQ,MAAM;AAC/B,UAAM,UAAU,IAAI,wCAAgB,EAAE,MAAM,CAAC,IAAI,EAAE,CAAC;AACpD,8BAAO,QAAQ,IAAI,EAAE,aAAa,CAAC;AACnC,8BAAO,QAAQ,KAAK,CAAC,CAAC,EAAE,KAAK,IAAI;AAAA,EACnC,CAAC;AAED,wBAAG,oCAAoC,MAAM;AAC3C,8BAAO,MAAM,IAAI,wCAAgB,EAAE,MAAM,CAAC,EAAE,CAAC,CAAC,EAAE,QAAQ;AAAA,EAC1D,CAAC;AAED,wBAAG,wCAAwC,YAAY;AACrD,UAAM,OAAO,IAAI,QAAQ,MAAM;AAC/B,UAAM,OAAO,IAAI,QAAQ,MAAM;AAC/B,UAAM,UAAU,IAAI,wCAAgB,EAAE,MAAM,CAAC,MAAM,IAAI,EAAE,CAAC;AAE1D,UAAM,SAAS,QAAQ,KAAK;AAAA,MAC1B,SAAS,CAAC;AAAA,IACZ,CAAC;AAED,UAAM,SAAsB,CAAC;AAC7B,qBAAiB,SAAS,QAAQ;AAChC,aAAO,KAAK,KAAK;AAAA,IACnB;AAEA,8BAAO,MAAM,EAAE,aAAa,CAAC;AAAA,EAE/B,CAAC;AAED,wBAAG,4DAA4D,YAAY;AACzE,UAAM,OAAO,IAAI,QAAQ,MAAM;AAC/B,SAAK,aAAa;AAClB,UAAM,OAAO,IAAI,QAAQ,MAAM;AAC/B,UAAM,UAAU,IAAI,wCAAgB,EAAE,MAAM,CAAC,MAAM,IAAI,EAAE,CAAC;AAE1D,UAAM,SAAS,QAAQ,KAAK;AAAA,MAC1B,SAAS,CAAC;AAAA,IACZ,CAAC;AAED,UAAM,SAAsB,CAAC;AAC7B,qBAAiB,SAAS,QAAQ;AAChC,aAAO,KAAK,KAAK;AAAA,IACnB;AAEA,8BAAO,MAAM,EAAE,aAAa,CAAC;AAC7B,8BAAO,QAAQ,QAAQ,CAAC,EAAG,SAAS,EAAE,KAAK,KAAK;AAChD,8BAAO,QAAQ,QAAQ,CAAC,EAAG,SAAS,EAAE,KAAK,IAAI;AAAA,EACjD,CAAC;AAED,wBAAG,gCAAgC,YAAY;AAC7C,UAAM,OAAO,IAAI,QAAQ,MAAM;AAC/B,SAAK,aAAa;AAClB,UAAM,OAAO,IAAI,QAAQ,MAAM;AAC/B,SAAK,aAAa;AAClB,UAAM,UAAU,IAAI,wCAAgB,EAAE,MAAM,CAAC,MAAM,IAAI,EAAE,CAAC;AAE1D,UAAM,SAAS,QAAQ,KAAK;AAAA,MAC1B,SAAS,CAAC;AAAA,IACZ,CAAC;AAED,UAAM,eAAe,IAAI,QAAe,CAAC,YAAY;AACnD,cAAQ,GAAG,SAAS,CAAC,MAAM,QAAQ,EAAE,KAAK,CAAC;AAAA,IAC7C,CAAC;AAED,qBAAiB,KAAK,QAAQ;AAAA,IAE9B;AAEA,UAAM,QAAQ,MAAM;AACpB,8BAAO,KAAK,EAAE,eAAe,oCAAkB;AAAA,EACjD,CAAC;AAED,wBAAG,4DAA4D,YAAY;AACzE,UAAM,OAAO,IAAI,QAAQ,MAAM;AAC/B,SAAK,aAAa;AAClB,SAAK,kBAAkB;AACvB,UAAM,OAAO,IAAI,QAAQ,MAAM;AAC/B,UAAM,UAAU,IAAI,wCAAgB;AAAA,MAClC,MAAM,CAAC,MAAM,IAAI;AAAA,MACjB,kBAAkB;AAAA,IACpB,CAAC;AAED,UAAM,SAAS,QAAQ,KAAK;AAAA,MAC1B,SAAS,CAAC;AAAA,IACZ,CAAC;AAED,UAAM,eAAe,IAAI,QAAe,CAAC,YAAY;AACnD,cAAQ,GAAG,SAAS,CAAC,MAAM,QAAQ,EAAE,KAAK,CAAC;AAAA,IAC7C,CAAC;AAED,qBAAiB,KAAK,QAAQ;AAAA,IAE9B;AAEA,UAAM,QAAQ,MAAM;AACpB,8BAAO,KAAK,EAAE,eAAe,0BAAQ;AAAA,EACvC,CAAC;AAED,wBAAG,+DAA+D,YAAY;AAC5E,UAAM,OAAO,IAAI,QAAQ,MAAM;AAC/B,SAAK,aAAa;AAClB,SAAK,kBAAkB;AACvB,UAAM,OAAO,IAAI,QAAQ,MAAM;AAC/B,UAAM,UAAU,IAAI,wCAAgB;AAAA,MAClC,MAAM,CAAC,MAAM,IAAI;AAAA,MACjB,kBAAkB;AAAA,IACpB,CAAC;AAED,UAAM,SAAS,QAAQ,KAAK;AAAA,MAC1B,SAAS,CAAC;AAAA,IACZ,CAAC;AAED,UAAM,SAAsB,CAAC;AAC7B,qBAAiB,SAAS,QAAQ;AAChC,aAAO,KAAK,KAAK;AAAA,IACnB;AAGA,8BAAO,MAAM,EAAE,aAAa,CAAC;AAAA,EAC/B,CAAC;AAED,wBAAG,2CAA2C,YAAY;AACxD,UAA
M,OAAO,IAAI,QAAQ,MAAM;AAC/B,SAAK,aAAa;AAClB,UAAM,OAAO,IAAI,QAAQ,MAAM;AAC/B,UAAM,UAAU,IAAI,wCAAgB,EAAE,MAAM,CAAC,MAAM,IAAI,EAAE,CAAC;AAE1D,UAAM,WAAW,iBAAG,GAAG;AACvB,IAAC,QAAgB,GAAG,4BAA4B,QAAQ;AAExD,UAAM,SAAS,QAAQ,KAAK;AAAA,MAC1B,SAAS,CAAC;AAAA,IACZ,CAAC;AAED,qBAAiB,KAAK,QAAQ;AAAA,IAE9B;AAEA,8BAAO,QAAQ,EAAE;AAAA,MACf,qBAAO,iBAAiB;AAAA,QACtB,KAAK;AAAA,QACL,WAAW;AAAA,MACb,CAAC;AAAA,IACH;AAAA,EACF,CAAC;AACH,CAAC;","names":[]}