@livekit/agents 1.0.32 → 1.0.34
- package/dist/inference/llm.cjs +0 -2
- package/dist/inference/llm.cjs.map +1 -1
- package/dist/inference/llm.d.ts.map +1 -1
- package/dist/inference/llm.js +0 -2
- package/dist/inference/llm.js.map +1 -1
- package/dist/llm/fallback_adapter.cjs +278 -0
- package/dist/llm/fallback_adapter.cjs.map +1 -0
- package/dist/llm/fallback_adapter.d.cts +73 -0
- package/dist/llm/fallback_adapter.d.ts +73 -0
- package/dist/llm/fallback_adapter.d.ts.map +1 -0
- package/dist/llm/fallback_adapter.js +254 -0
- package/dist/llm/fallback_adapter.js.map +1 -0
- package/dist/llm/fallback_adapter.test.cjs +176 -0
- package/dist/llm/fallback_adapter.test.cjs.map +1 -0
- package/dist/llm/fallback_adapter.test.js +175 -0
- package/dist/llm/fallback_adapter.test.js.map +1 -0
- package/dist/llm/index.cjs +3 -0
- package/dist/llm/index.cjs.map +1 -1
- package/dist/llm/index.d.cts +1 -0
- package/dist/llm/index.d.ts +1 -0
- package/dist/llm/index.d.ts.map +1 -1
- package/dist/llm/index.js +4 -0
- package/dist/llm/index.js.map +1 -1
- package/dist/llm/llm.cjs +1 -1
- package/dist/llm/llm.cjs.map +1 -1
- package/dist/llm/llm.js +1 -1
- package/dist/llm/llm.js.map +1 -1
- package/dist/log.cjs +3 -3
- package/dist/log.cjs.map +1 -1
- package/dist/log.d.cts +0 -5
- package/dist/log.d.ts +0 -5
- package/dist/log.d.ts.map +1 -1
- package/dist/log.js +3 -3
- package/dist/log.js.map +1 -1
- package/dist/stt/stt.cjs +1 -1
- package/dist/stt/stt.cjs.map +1 -1
- package/dist/stt/stt.js +1 -1
- package/dist/stt/stt.js.map +1 -1
- package/dist/tts/tts.cjs +2 -2
- package/dist/tts/tts.cjs.map +1 -1
- package/dist/tts/tts.js +2 -2
- package/dist/tts/tts.js.map +1 -1
- package/package.json +1 -1
- package/src/inference/llm.ts +0 -2
- package/src/llm/fallback_adapter.test.ts +238 -0
- package/src/llm/fallback_adapter.ts +391 -0
- package/src/llm/index.ts +6 -0
- package/src/llm/llm.ts +1 -1
- package/src/log.ts +3 -9
- package/src/stt/stt.ts +1 -1
- package/src/tts/tts.ts +2 -2
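
Two changes dominate this diff: a new llm/fallback_adapter module (implementation, tests, and re-exports from llm/index), and a one-line fix in the LLMStream constructor that closes the chunk queue with .finally() instead of .then(), so consumers are released even when the request task rejects. The remaining deltas are regenerated artifacts (source maps, declaration files) and small cleanups in inference/llm, log, stt/stt, and tts/tts.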
package/dist/llm/fallback_adapter.test.js
ADDED
@@ -0,0 +1,175 @@
+import { beforeAll, describe, expect, it, vi } from "vitest";
+import { APIConnectionError, APIError } from "../_exceptions.js";
+import { initializeLogger } from "../log.js";
+import { delay } from "../utils.js";
+import { FallbackAdapter } from "./fallback_adapter.js";
+import { LLM, LLMStream } from "./llm.js";
+class MockLLMStream extends LLMStream {
+  constructor(llm, opts, shouldFail = false, failAfterChunks = 0) {
+    super(llm, opts);
+    this.shouldFail = shouldFail;
+    this.failAfterChunks = failAfterChunks;
+    this.myLLM = llm;
+  }
+  myLLM;
+  async run() {
+    if (this.shouldFail && this.failAfterChunks === 0) {
+      throw new APIError("Mock LLM failed immediately");
+    }
+    const chunk = {
+      id: "test-id",
+      delta: { role: "assistant", content: "chunk" }
+    };
+    for (let i = 0; i < 3; i++) {
+      if (this.shouldFail && i === this.failAfterChunks) {
+        throw new APIError("Mock LLM failed after chunks");
+      }
+      this.queue.put(chunk);
+      await delay(10);
+    }
+  }
+}
+class MockLLM extends LLM {
+  shouldFail = false;
+  failAfterChunks = 0;
+  _label;
+  constructor(label) {
+    super();
+    this._label = label;
+  }
+  label() {
+    return this._label;
+  }
+  chat(opts) {
+    return new MockLLMStream(
+      this,
+      {
+        chatCtx: opts.chatCtx,
+        toolCtx: opts.toolCtx,
+        connOptions: opts.connOptions
+      },
+      this.shouldFail,
+      this.failAfterChunks
+    );
+  }
+}
+describe("FallbackAdapter", () => {
+  beforeAll(() => {
+    initializeLogger({ pretty: false });
+    process.on("unhandledRejection", () => {
+    });
+  });
+  it("should initialize correctly", () => {
+    const llm1 = new MockLLM("llm1");
+    const adapter = new FallbackAdapter({ llms: [llm1] });
+    expect(adapter.llms).toHaveLength(1);
+    expect(adapter.llms[0]).toBe(llm1);
+  });
+  it("should throw if no LLMs provided", () => {
+    expect(() => new FallbackAdapter({ llms: [] })).toThrow();
+  });
+  it("should use primary LLM if successful", async () => {
+    const llm1 = new MockLLM("llm1");
+    const llm2 = new MockLLM("llm2");
+    const adapter = new FallbackAdapter({ llms: [llm1, llm2] });
+    const stream = adapter.chat({
+      chatCtx: {}
+    });
+    const chunks = [];
+    for await (const chunk of stream) {
+      chunks.push(chunk);
+    }
+    expect(chunks).toHaveLength(3);
+  });
+  it("should fallback to second LLM if first fails immediately", async () => {
+    const llm1 = new MockLLM("llm1");
+    llm1.shouldFail = true;
+    const llm2 = new MockLLM("llm2");
+    const adapter = new FallbackAdapter({ llms: [llm1, llm2] });
+    const stream = adapter.chat({
+      chatCtx: {}
+    });
+    const chunks = [];
+    for await (const chunk of stream) {
+      chunks.push(chunk);
+    }
+    expect(chunks).toHaveLength(3);
+    expect(adapter._status[0].available).toBe(false);
+    expect(adapter._status[1].available).toBe(true);
+  });
+  it("should fail if all LLMs fail", async () => {
+    const llm1 = new MockLLM("llm1");
+    llm1.shouldFail = true;
+    const llm2 = new MockLLM("llm2");
+    llm2.shouldFail = true;
+    const adapter = new FallbackAdapter({ llms: [llm1, llm2] });
+    const stream = adapter.chat({
+      chatCtx: {}
+    });
+    const errorPromise = new Promise((resolve) => {
+      adapter.on("error", (e) => resolve(e.error));
+    });
+    for await (const _ of stream) {
+    }
+    const error = await errorPromise;
+    expect(error).toBeInstanceOf(APIConnectionError);
+  });
+  it("should fail if chunks sent and retryOnChunkSent is false", async () => {
+    const llm1 = new MockLLM("llm1");
+    llm1.shouldFail = true;
+    llm1.failAfterChunks = 1;
+    const llm2 = new MockLLM("llm2");
+    const adapter = new FallbackAdapter({
+      llms: [llm1, llm2],
+      retryOnChunkSent: false
+    });
+    const stream = adapter.chat({
+      chatCtx: {}
+    });
+    const errorPromise = new Promise((resolve) => {
+      adapter.on("error", (e) => resolve(e.error));
+    });
+    for await (const _ of stream) {
+    }
+    const error = await errorPromise;
+    expect(error).toBeInstanceOf(APIError);
+  });
+  it("should fallback if chunks sent and retryOnChunkSent is true", async () => {
+    const llm1 = new MockLLM("llm1");
+    llm1.shouldFail = true;
+    llm1.failAfterChunks = 1;
+    const llm2 = new MockLLM("llm2");
+    const adapter = new FallbackAdapter({
+      llms: [llm1, llm2],
+      retryOnChunkSent: true
+    });
+    const stream = adapter.chat({
+      chatCtx: {}
+    });
+    const chunks = [];
+    for await (const chunk of stream) {
+      chunks.push(chunk);
+    }
+    expect(chunks).toHaveLength(4);
+  });
+  it("should emit availability changed events", async () => {
+    const llm1 = new MockLLM("llm1");
+    llm1.shouldFail = true;
+    const llm2 = new MockLLM("llm2");
+    const adapter = new FallbackAdapter({ llms: [llm1, llm2] });
+    const eventSpy = vi.fn();
+    adapter.on("llm_availability_changed", eventSpy);
+    const stream = adapter.chat({
+      chatCtx: {}
+    });
+    for await (const _ of stream) {
+    }
+    expect(eventSpy).toHaveBeenCalledWith(
+      expect.objectContaining({
+        llm: llm1,
+        available: false
+      })
+    );
+  });
+});
+//# sourceMappingURL=fallback_adapter.test.js.map

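Taken together, these tests pin down the adapter's contract: it is constructed with an ordered list of LLM instances, its chat() stream is consumed exactly like a single LLM's, failures roll over to the next instance (optionally even after chunks were already emitted, via retryOnChunkSent), and the error / llm_availability_changed events report terminal failures and status flips. A minimal consumer could look like the sketch below; the two provider instances are hypothetical stand-ins, and the namespace import assumes the package's usual llm export.

    import { llm } from '@livekit/agents';

    // Hypothetical provider instances; any concrete llm.LLM subclass works here.
    declare const primary: llm.LLM;
    declare const backup: llm.LLM;

    // Order matters: `primary` is tried first, `backup` only on failure.
    const adapter = new llm.FallbackAdapter({
      llms: [primary, backup],
      // As exercised by the tests: retry on the next LLM even after some
      // chunks were already emitted by the failing one.
      retryOnChunkSent: true,
    });

    adapter.on('llm_availability_changed', ({ llm: changed, available }) => {
      console.log(`${changed.label()} is now ${available ? 'available' : 'unavailable'}`);
    });

    async function complete(chatCtx: llm.ChatContext): Promise<string> {
      let text = '';
      // chat() returns an async-iterable LLMStream, same shape as a single LLM's.
      for await (const chunk of adapter.chat({ chatCtx })) {
        text += chunk.delta?.content ?? '';
      }
      return text;
    }

Because the adapter exposes the same chat() surface as a single LLM, it can be dropped in wherever one model was configured before.
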
package/dist/llm/fallback_adapter.test.js.map
ADDED
(generated source map not rendered)

package/dist/llm/index.cjs
CHANGED
@@ -21,6 +21,7 @@ __export(llm_exports, {
   AgentHandoffItem: () => import_chat_context.AgentHandoffItem,
   ChatContext: () => import_chat_context.ChatContext,
   ChatMessage: () => import_chat_context.ChatMessage,
+  FallbackAdapter: () => import_fallback_adapter.FallbackAdapter,
   FunctionCall: () => import_chat_context.FunctionCall,
   FunctionCallOutput: () => import_chat_context.FunctionCallOutput,
   LLM: () => import_llm.LLM,
@@ -48,11 +49,13 @@ var import_llm = require("./llm.cjs");
 var import_realtime = require("./realtime.cjs");
 var import_remote_chat_context = require("./remote_chat_context.cjs");
 var import_utils = require("./utils.cjs");
+var import_fallback_adapter = require("./fallback_adapter.cjs");
 // Annotate the CommonJS export names for ESM import in node:
 0 && (module.exports = {
   AgentHandoffItem,
   ChatContext,
   ChatMessage,
+  FallbackAdapter,
   FunctionCall,
   FunctionCallOutput,
   LLM,

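The seemingly dead 0 && (module.exports = { ... }) block is intentional: as the preserved comment says, it annotates the CommonJS export names so that Node's static analyzer (cjs-module-lexer) can offer them as named imports from ESM. That is why FallbackAdapter has to be added there in addition to the live __export call above.
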
package/dist/llm/index.cjs.map
CHANGED
(generated source map not rendered)

package/dist/llm/index.d.cts
CHANGED
@@ -5,4 +5,5 @@ export { LLM, LLMStream, type ChatChunk, type ChoiceDelta, type CompletionUsage,
 export { RealtimeModel, RealtimeSession, type GenerationCreatedEvent, type InputSpeechStartedEvent, type InputSpeechStoppedEvent, type InputTranscriptionCompleted, type MessageGeneration, type RealtimeCapabilities, type RealtimeModelError, type RealtimeSessionReconnectedEvent, } from './realtime.js';
 export { RemoteChatContext } from './remote_chat_context.js';
 export { computeChatCtxDiff, createToolOptions, executeToolCall, oaiBuildFunctionInfo, oaiParams, toJsonSchema, type OpenAIFunctionParameters, } from './utils.js';
+export { FallbackAdapter, type AvailabilityChangedEvent, type FallbackAdapterOptions, } from './fallback_adapter.js';
 //# sourceMappingURL=index.d.ts.map

package/dist/llm/index.d.ts
CHANGED
@@ -5,4 +5,5 @@ export { LLM, LLMStream, type ChatChunk, type ChoiceDelta, type CompletionUsage,
 export { RealtimeModel, RealtimeSession, type GenerationCreatedEvent, type InputSpeechStartedEvent, type InputSpeechStoppedEvent, type InputTranscriptionCompleted, type MessageGeneration, type RealtimeCapabilities, type RealtimeModelError, type RealtimeSessionReconnectedEvent, } from './realtime.js';
 export { RemoteChatContext } from './remote_chat_context.js';
 export { computeChatCtxDiff, createToolOptions, executeToolCall, oaiBuildFunctionInfo, oaiParams, toJsonSchema, type OpenAIFunctionParameters, } from './utils.js';
+export { FallbackAdapter, type AvailabilityChangedEvent, type FallbackAdapterOptions, } from './fallback_adapter.js';
 //# sourceMappingURL=index.d.ts.map

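The declaration files add two public types next to the class, but their definitions live in fallback_adapter.d.ts, which is not reproduced in this diff. From the constructor options and the event payload asserted in the tests, plausible shapes are sketched below; treat these as inferred, not the published signatures.

    import type { LLM } from './llm.js';

    // Inferred from `new FallbackAdapter({ llms, retryOnChunkSent })` in the tests.
    interface FallbackAdapterOptions {
      llms: LLM[];
      retryOnChunkSent?: boolean;
    }

    // Inferred from `expect.objectContaining({ llm: llm1, available: false })`.
    interface AvailabilityChangedEvent {
      llm: LLM;
      available: boolean;
    }
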
package/dist/llm/index.d.ts.map
CHANGED
(generated source map not rendered)

package/dist/llm/index.js
CHANGED
@@ -30,10 +30,14 @@ import {
   oaiParams,
   toJsonSchema
 } from "./utils.js";
+import {
+  FallbackAdapter
+} from "./fallback_adapter.js";
 export {
   AgentHandoffItem,
   ChatContext,
   ChatMessage,
+  FallbackAdapter,
   FunctionCall,
   FunctionCallOutput,
   LLM,

package/dist/llm/index.js.map
CHANGED
(generated source map not rendered)

package/dist/llm/llm.cjs
CHANGED
@@ -77,7 +77,7 @@ class LLMStream {
     this.output.close();
     this.closed = true;
   });
-  (0, import_utils.startSoon)(() => this.mainTask().then(() => this.queue.close()));
+  (0, import_utils.startSoon)(() => this.mainTask().finally(() => this.queue.close()));
 }
 _mainTaskImpl = async (span) => {
   this.#llmRequestSpan = span;

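This one-line change is the behavioral fix of the release: with .then(), the queue was only closed when mainTask() resolved, so a rejected request left the queue open and anything iterating the stream hung after the last delivered chunk; .finally() closes it on success and failure alike. The self-contained sketch below reproduces the difference with a simplified, hypothetical Channel standing in for AsyncIterableQueue:

    // Simplified stand-in for AsyncIterableQueue: iteration ends once close() is called.
    class Channel<T> implements AsyncIterable<T> {
      private items: T[] = [];
      private waiters: ((r: IteratorResult<T>) => void)[] = [];
      private closed = false;

      put(item: T): void {
        const waiter = this.waiters.shift();
        if (waiter) waiter({ value: item, done: false });
        else this.items.push(item);
      }

      close(): void {
        this.closed = true;
        for (const waiter of this.waiters.splice(0)) waiter({ value: undefined, done: true });
      }

      [Symbol.asyncIterator](): AsyncIterator<T> {
        return {
          next: (): Promise<IteratorResult<T>> => {
            if (this.items.length > 0) return Promise.resolve({ value: this.items.shift()!, done: false });
            if (this.closed) return Promise.resolve({ value: undefined, done: true });
            return new Promise((resolve) => this.waiters.push(resolve));
          },
        };
      }
    }

    const queue = new Channel<string>();
    const failingTask = async (): Promise<void> => {
      queue.put('one chunk');
      throw new Error('provider went away'); // mainTask() rejecting
    };

    // Old behavior: `.then()` never runs on rejection, so the queue stays open
    // and the loop below would block forever after the first chunk:
    //   failingTask().then(() => queue.close());

    // Fixed behavior: `.finally()` runs on resolve *and* reject. (The `.catch()`
    // just silences the rejection, as the tests do via an unhandledRejection handler.)
    failingTask().catch(() => {}).finally(() => queue.close());

    for await (const chunk of queue) console.log(chunk); // prints once, then the loop exits
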
package/dist/llm/llm.cjs.map
CHANGED
(generated source map not rendered)

package/dist/llm/llm.js
CHANGED
@@ -53,7 +53,7 @@ class LLMStream {
     this.output.close();
     this.closed = true;
   });
-  startSoon(() => this.mainTask().then(() => this.queue.close()));
+  startSoon(() => this.mainTask().finally(() => this.queue.close()));
 }
 _mainTaskImpl = async (span) => {
   this.#llmRequestSpan = span;

package/dist/llm/llm.js.map
CHANGED
@@ -1 +1 @@
(source map regenerated to match llm.js; minified JSON blob omitted)
package/dist/log.cjs
CHANGED
@@ -64,12 +64,12 @@ const enableOtelLogging = () => {
   }
   otelEnabled = true;
   const { pretty, level } = loggerOptions;
-  const …
+  const logLevel = level || "info";
   const streams = [
-    { stream: pretty ? (0, import_pino_pretty.build)({ colorize: true }) : process.stdout, level: …
+    { stream: pretty ? (0, import_pino_pretty.build)({ colorize: true }) : process.stdout, level: logLevel },
     { stream: new OtelDestination(), level: "debug" }
   ];
-  logger = (0, import_pino.pino)({ level: …
+  logger = (0, import_pino.pino)({ level: logLevel }, (0, import_pino.multistream)(streams));
 };
 // Annotate the CommonJS export names for ESM import in node:
 0 && (module.exports = {
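For context on this fix: the truncated `-` lines above removed the old per-stream wiring, and the new code keys every level off a single `logLevel`. In pino, the logger's own `level` gates which records are generated at all, and `multistream` then forwards each record to every stream whose `level` is at or below it; after this change the OTEL stream's `"debug"` setting therefore only receives debug records when the user-configured level is `"debug"` or lower, consistent with the doc-comment removal in log.d.cts/log.d.ts below. A hedged sketch of the same wiring (stdout/stderr as placeholder destinations, not the package's actual streams):

import { multistream, pino } from "pino";

const level = process.env.LOG_LEVEL; // e.g. "info"
const logLevel = level || "info";

const streams = [
  { stream: process.stdout, level: logLevel }, // terminal at the user's level
  { stream: process.stderr, level: "debug" },  // stand-in for OtelDestination
];

const logger = pino({ level: logLevel }, multistream(streams));
logger.info("reaches both streams");
logger.debug("never generated when logLevel is 'info', despite the debug stream");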
package/dist/log.cjs.map
CHANGED
@@ -1 +1 @@
(source map regenerated to match log.cjs; minified JSON blob omitted)
package/dist/log.d.cts
CHANGED
@@ -14,11 +14,6 @@ export declare const initializeLogger: ({ pretty, level }: LoggerOptions) => void;
  * Enable OTEL logging by reconfiguring the logger with multistream.
  * Uses a custom destination that receives full JSON logs (with msg, level, time).
  *
- * The base logger level is set to 'debug' so all logs are generated,
- * while each stream filters to its own level:
- * - Terminal: user-specified level (default: 'info')
- * - OTEL/Cloud: always 'debug' to capture all logs for observability
- *
  * @internal
  */
 export declare const enableOtelLogging: () => void;
package/dist/log.d.ts
CHANGED
@@ -14,11 +14,6 @@ export declare const initializeLogger: ({ pretty, level }: LoggerOptions) => void;
  * Enable OTEL logging by reconfiguring the logger with multistream.
  * Uses a custom destination that receives full JSON logs (with msg, level, time).
  *
- * The base logger level is set to 'debug' so all logs are generated,
- * while each stream filters to its own level:
- * - Terminal: user-specified level (default: 'info')
- * - OTEL/Cloud: always 'debug' to capture all logs for observability
- *
  * @internal
  */
 export declare const enableOtelLogging: () => void;
package/dist/log.d.ts.map
CHANGED
@@ -1 +1 @@
(declaration map regenerated to match log.d.ts; minified JSON blob omitted)
package/dist/log.js
CHANGED
@@ -38,12 +38,12 @@ const enableOtelLogging = () => {
   }
   otelEnabled = true;
   const { pretty, level } = loggerOptions;
-  const …
+  const logLevel = level || "info";
   const streams = [
-    { stream: pretty ? pinoPretty({ colorize: true }) : process.stdout, level: …
+    { stream: pretty ? pinoPretty({ colorize: true }) : process.stdout, level: logLevel },
     { stream: new OtelDestination(), level: "debug" }
   ];
-  logger = pino({ level: …
+  logger = pino({ level: logLevel }, multistream(streams));
 };
 export {
   enableOtelLogging,
package/dist/log.js.map
CHANGED
@@ -1 +1 @@
(source map regenerated to match log.js; minified JSON blob omitted)
package/dist/stt/stt.cjs
CHANGED
@@ -90,7 +90,7 @@ class SpeechStream {
     this.neededSampleRate = sampleRate;
     this.monitorMetrics();
     this.pumpInput();
-    (0, import_utils.startSoon)(() => this.mainTask().…
+    (0, import_utils.startSoon)(() => this.mainTask().finally(() => this.queue.close()));
   }
   async mainTask() {
     for (let i = 0; i < this._connOptions.maxRetry + 1; i++) {
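The same `.then` to `.finally` repair is applied to SpeechStream here. These stream classes are async iterators over their internal queue, so a `for await` loop over them only ends once the queue is closed; before the fix, a provider error could leave that loop waiting on a queue nobody would ever close. A small consumer-side sketch (hypothetical generator standing in for the stream, not package code; assumes an ES module):

async function* speechEvents(failed: boolean): AsyncGenerator<string> {
  yield "interim: hello";
  if (failed) return; // stands in for the queue being closed by .finally() after an error
  yield "final: hello world";
}

for await (const event of speechEvents(true)) {
  console.log(event); // prints the interim event; the loop then exits instead of hanging
}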