@livekit/agents 1.0.38 → 1.0.39
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/inference/llm.cjs +7 -3
- package/dist/inference/llm.cjs.map +1 -1
- package/dist/inference/llm.d.cts +5 -6
- package/dist/inference/llm.d.ts +5 -6
- package/dist/inference/llm.d.ts.map +1 -1
- package/dist/inference/llm.js +7 -3
- package/dist/inference/llm.js.map +1 -1
- package/dist/inference/stt.cjs.map +1 -1
- package/dist/inference/stt.d.cts +5 -4
- package/dist/inference/stt.d.ts +5 -4
- package/dist/inference/stt.d.ts.map +1 -1
- package/dist/inference/stt.js.map +1 -1
- package/dist/inference/tts.cjs.map +1 -1
- package/dist/inference/tts.d.cts +10 -7
- package/dist/inference/tts.d.ts +10 -7
- package/dist/inference/tts.d.ts.map +1 -1
- package/dist/inference/tts.js.map +1 -1
- package/dist/stt/stream_adapter.cjs +9 -1
- package/dist/stt/stream_adapter.cjs.map +1 -1
- package/dist/stt/stream_adapter.d.ts.map +1 -1
- package/dist/stt/stream_adapter.js +9 -1
- package/dist/stt/stream_adapter.js.map +1 -1
- package/dist/utils.cjs +5 -0
- package/dist/utils.cjs.map +1 -1
- package/dist/utils.d.cts +8 -0
- package/dist/utils.d.ts +8 -0
- package/dist/utils.d.ts.map +1 -1
- package/dist/utils.js +4 -0
- package/dist/utils.js.map +1 -1
- package/dist/voice/agent.cjs +1 -2
- package/dist/voice/agent.cjs.map +1 -1
- package/dist/voice/agent.js +1 -2
- package/dist/voice/agent.js.map +1 -1
- package/dist/voice/agent_activity.cjs +23 -14
- package/dist/voice/agent_activity.cjs.map +1 -1
- package/dist/voice/agent_activity.d.cts +1 -0
- package/dist/voice/agent_activity.d.ts +1 -0
- package/dist/voice/agent_activity.d.ts.map +1 -1
- package/dist/voice/agent_activity.js +23 -14
- package/dist/voice/agent_activity.js.map +1 -1
- package/package.json +2 -2
- package/src/inference/llm.ts +20 -15
- package/src/inference/stt.ts +9 -7
- package/src/inference/tts.ts +36 -16
- package/src/stt/stream_adapter.ts +12 -1
- package/src/utils.ts +14 -0
- package/src/voice/agent.ts +2 -2
- package/src/voice/agent_activity.ts +36 -15
package/dist/inference/llm.cjs
CHANGED
@@ -267,15 +267,19 @@ class LLMStream extends llm.LLMStream {
         if (this.toolCallId && tool.id && tool.index !== this.toolIndex) {
           callChunk = this.createRunningToolCallChunk(id, delta);
           this.toolCallId = this.fncName = this.fncRawArguments = void 0;
-          this.toolExtra = void 0;
         }
         if (tool.function.name) {
           this.toolIndex = tool.index;
           this.toolCallId = tool.id;
           this.fncName = tool.function.name;
           this.fncRawArguments = tool.function.arguments || "";
-
-
+          const newToolExtra = (
+            // eslint-disable-next-line @typescript-eslint/no-explicit-any
+            tool.extra_content ?? void 0
+          );
+          if (newToolExtra) {
+            this.toolExtra = newToolExtra;
+          }
         } else if (tool.function.arguments) {
           this.fncRawArguments = (this.fncRawArguments || "") + tool.function.arguments;
         }
package/dist/inference/llm.cjs.map
CHANGED
@@ -1 +1 @@
-{"version":3,"sources":["../../src/inference/llm.ts"],"sourcesContent":["…"],"mappings":"…","names":["OpenAI","llm"]}
+{"version":3,"sources":["../../src/inference/llm.ts"],"sourcesContent":["…"],"mappings":"…","names":["OpenAI","llm"]}
(The single-line source maps are elided above; their embedded sourcesContent tracks the src/inference/llm.ts changes in this release, including the updated model unions and the comments explaining that toolExtra is intentionally not reset between parallel tool calls.)
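The functional change in llm.cjs (mirrored in llm.js below) is in how LLMStream tracks provider metadata while tool calls stream in. Previously, `this.toolExtra` was cleared whenever a new tool call began and then overwritten unconditionally from `tool.extra_content`; in 1.0.39 the clear is dropped and the field is only overwritten when the incoming chunk actually carries `extra_content`. Per the comments in the updated source, this lets a Gemini 3 thought signature, which arrives only on the first tool call of a parallel batch, be inherited by every call in the batch. The sketch below is a minimal, self-contained model of that accumulation rule, not the real LLMStream class; the `ToolCallDelta` shape and class name are illustrative.

```ts
// Illustrative model of the 1.0.39 accumulation rule (not the actual LLMStream class).
interface ToolCallDelta {
  index: number;
  id?: string;
  name?: string;
  args?: string;
  extra_content?: Record<string, unknown>; // e.g. { google: { thoughtSignature: '...' } }
}

class ToolCallAccumulator {
  private toolIndex?: number;
  private toolCallId?: string;
  private fncName?: string;
  private fncRawArguments?: string;
  private toolExtra?: Record<string, unknown>;

  push(tool: ToolCallDelta): void {
    if (tool.name) {
      // A new tool call starts: reset the per-call identity fields...
      this.toolIndex = tool.index;
      this.toolCallId = tool.id;
      this.fncName = tool.name;
      this.fncRawArguments = tool.args ?? '';
      // ...but only overwrite toolExtra when this chunk actually carries extra_content,
      // so later calls in a parallel batch inherit the first call's thought signature.
      const newToolExtra = tool.extra_content ?? undefined;
      if (newToolExtra) {
        this.toolExtra = newToolExtra;
      }
    } else if (tool.args) {
      // Subsequent chunks for the same call only append to the raw argument string.
      this.fncRawArguments = (this.fncRawArguments ?? '') + tool.args;
    }
  }

  current() {
    return {
      index: this.toolIndex,
      callId: this.toolCallId,
      name: this.fncName,
      args: this.fncRawArguments,
      extra: this.toolExtra,
    };
  }
}

// Two parallel tool calls: only the first carries extra_content.
const acc = new ToolCallAccumulator();
acc.push({
  index: 0,
  id: 'call_1',
  name: 'get_weather',
  args: '',
  extra_content: { google: { thoughtSignature: 'sig' } },
});
acc.push({ index: 1, id: 'call_2', name: 'get_weather', args: '' });
console.log(acc.current().extra); // still { google: { thoughtSignature: 'sig' } } under the new rule
```

The reset of `toolExtra` still happens at the end of the response (when `finish_reason` is reached), so signatures do not leak across separate completions.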
package/dist/inference/llm.d.cts
CHANGED
@@ -3,11 +3,10 @@ import { type Expand } from '../index.js';
 import * as llm from '../llm/index.js';
 import type { APIConnectOptions } from '../types.js';
 import { type AnyString } from './utils.js';
-export type OpenAIModels = 'openai/gpt-5' | 'openai/gpt-5-mini' | 'openai/gpt-5-nano' | 'openai/gpt-4.1' | 'openai/gpt-4.1-mini' | 'openai/gpt-4.1-nano' | 'openai/gpt-4o' | 'openai/gpt-4o-mini' | 'openai/gpt-oss-120b';
-export type GoogleModels = 'google/gemini-3-pro-preview' | 'google/gemini-3-flash-preview' | 'google/gemini-2.5-pro' | 'google/gemini-2.5-flash' | 'google/gemini-2.5-flash-lite' | 'google/gemini-2.0-flash' | 'google/gemini-2.0-flash-lite';
-export type QwenModels = 'qwen/qwen3-235b-a22b-instruct';
-export type KimiModels = 'moonshotai/kimi-k2-instruct';
-export type DeepSeekModels = 'deepseek-ai/deepseek-v3';
+export type OpenAIModels = 'openai/gpt-5.2' | 'openai/gpt-5.2-chat-latest' | 'openai/gpt-5.1' | 'openai/gpt-5.1-chat-latest' | 'openai/gpt-5' | 'openai/gpt-5-mini' | 'openai/gpt-5-nano' | 'openai/gpt-4.1' | 'openai/gpt-4.1-mini' | 'openai/gpt-4.1-nano' | 'openai/gpt-4o' | 'openai/gpt-4o-mini' | 'openai/gpt-oss-120b';
+export type GoogleModels = 'google/gemini-3-pro' | 'google/gemini-3-flash' | 'google/gemini-2.5-pro' | 'google/gemini-2.5-flash' | 'google/gemini-2.5-flash-lite' | 'google/gemini-2.0-flash' | 'google/gemini-2.0-flash-lite';
+export type MoonshotModels = 'moonshotai/kimi-k2-instruct';
+export type DeepSeekModels = 'deepseek-ai/deepseek-v3' | 'deepseek-ai/deepseek-v3.2';
 type ChatCompletionPredictionContentParam = Expand<OpenAI.Chat.Completions.ChatCompletionPredictionContent>;
 type WebSearchOptions = Expand<OpenAI.Chat.Completions.ChatCompletionCreateParams.WebSearchOptions>;
 type ToolChoice = Expand<OpenAI.Chat.Completions.ChatCompletionCreateParams['tool_choice']>;
@@ -39,7 +38,7 @@ export interface ChatCompletionOptions extends Record<string, unknown> {
     web_search_options?: WebSearchOptions;
     tool_choice?: ToolChoice;
 }
-export type LLMModels = OpenAIModels | GoogleModels | QwenModels | KimiModels | DeepSeekModels | AnyString;
+export type LLMModels = OpenAIModels | GoogleModels | MoonshotModels | DeepSeekModels | AnyString;
 export interface InferenceLLMOptions {
     model: LLMModels;
     provider?: string;
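For reference, the retyped unions above change which model ids type-check when constructing the inference LLM. A minimal sketch, assuming `@livekit/agents` exposes this module under an `inference` namespace (the import path is not shown in this diff) and that LIVEKIT_API_KEY / LIVEKIT_API_SECRET are set in the environment:

```ts
import { inference } from '@livekit/agents'; // assumed export path; not shown in this diff

// 'google/gemini-3-flash' and 'openai/gpt-5.1' are valid literals as of 1.0.39
// (the Gemini 3 ids drop their '-preview' suffix); credentials fall back to
// LIVEKIT_API_KEY / LIVEKIT_API_SECRET when apiKey/apiSecret are omitted.
const llm = new inference.LLM({
  model: 'google/gemini-3-flash',
  modelOptions: { temperature: 0.4 },
});

// The union still ends in AnyString, so ids outside the listed literals keep compiling:
const deepseek = inference.LLM.fromModelString('deepseek-ai/deepseek-v3.2');
```

Note that the Qwen literal ('qwen/qwen3-235b-a22b-instruct') and the KimiModels alias were dropped from the union in this release; such ids still compile through AnyString, but they are no longer advertised literals.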
package/dist/inference/llm.d.ts
CHANGED
@@ -3,11 +3,10 @@ import { type Expand } from '../index.js';
 import * as llm from '../llm/index.js';
 import type { APIConnectOptions } from '../types.js';
 import { type AnyString } from './utils.js';
-export type OpenAIModels = 'openai/gpt-5' | 'openai/gpt-5-mini' | 'openai/gpt-5-nano' | 'openai/gpt-4.1' | 'openai/gpt-4.1-mini' | 'openai/gpt-4.1-nano' | 'openai/gpt-4o' | 'openai/gpt-4o-mini' | 'openai/gpt-oss-120b';
-export type GoogleModels = 'google/gemini-3-pro-preview' | 'google/gemini-3-flash-preview' | 'google/gemini-2.5-pro' | 'google/gemini-2.5-flash' | 'google/gemini-2.5-flash-lite' | 'google/gemini-2.0-flash' | 'google/gemini-2.0-flash-lite';
-export type QwenModels = 'qwen/qwen3-235b-a22b-instruct';
-export type KimiModels = 'moonshotai/kimi-k2-instruct';
-export type DeepSeekModels = 'deepseek-ai/deepseek-v3';
+export type OpenAIModels = 'openai/gpt-5.2' | 'openai/gpt-5.2-chat-latest' | 'openai/gpt-5.1' | 'openai/gpt-5.1-chat-latest' | 'openai/gpt-5' | 'openai/gpt-5-mini' | 'openai/gpt-5-nano' | 'openai/gpt-4.1' | 'openai/gpt-4.1-mini' | 'openai/gpt-4.1-nano' | 'openai/gpt-4o' | 'openai/gpt-4o-mini' | 'openai/gpt-oss-120b';
+export type GoogleModels = 'google/gemini-3-pro' | 'google/gemini-3-flash' | 'google/gemini-2.5-pro' | 'google/gemini-2.5-flash' | 'google/gemini-2.5-flash-lite' | 'google/gemini-2.0-flash' | 'google/gemini-2.0-flash-lite';
+export type MoonshotModels = 'moonshotai/kimi-k2-instruct';
+export type DeepSeekModels = 'deepseek-ai/deepseek-v3' | 'deepseek-ai/deepseek-v3.2';
 type ChatCompletionPredictionContentParam = Expand<OpenAI.Chat.Completions.ChatCompletionPredictionContent>;
 type WebSearchOptions = Expand<OpenAI.Chat.Completions.ChatCompletionCreateParams.WebSearchOptions>;
 type ToolChoice = Expand<OpenAI.Chat.Completions.ChatCompletionCreateParams['tool_choice']>;
@@ -39,7 +38,7 @@ export interface ChatCompletionOptions extends Record<string, unknown> {
     web_search_options?: WebSearchOptions;
     tool_choice?: ToolChoice;
 }
-export type LLMModels = OpenAIModels | GoogleModels | QwenModels | KimiModels | DeepSeekModels | AnyString;
+export type LLMModels = OpenAIModels | GoogleModels | MoonshotModels | DeepSeekModels | AnyString;
 export interface InferenceLLMOptions {
     model: LLMModels;
     provider?: string;
package/dist/inference/llm.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"llm.d.ts","sourceRoot":"","sources":["../../src/inference/llm.ts"],"names":[],"mappings":"…"}
+{"version":3,"file":"llm.d.ts","sourceRoot":"","sources":["../../src/inference/llm.ts"],"names":[],"mappings":"…"}
package/dist/inference/llm.js
CHANGED
@@ -239,15 +239,19 @@ class LLMStream extends llm.LLMStream {
         if (this.toolCallId && tool.id && tool.index !== this.toolIndex) {
           callChunk = this.createRunningToolCallChunk(id, delta);
           this.toolCallId = this.fncName = this.fncRawArguments = void 0;
-          this.toolExtra = void 0;
         }
         if (tool.function.name) {
           this.toolIndex = tool.index;
           this.toolCallId = tool.id;
           this.fncName = tool.function.name;
           this.fncRawArguments = tool.function.arguments || "";
-
-
+          const newToolExtra = (
+            // eslint-disable-next-line @typescript-eslint/no-explicit-any
+            tool.extra_content ?? void 0
+          );
+          if (newToolExtra) {
+            this.toolExtra = newToolExtra;
+          }
         } else if (tool.function.arguments) {
           this.fncRawArguments = (this.fncRawArguments || "") + tool.function.arguments;
         }
package/dist/inference/llm.js.map
CHANGED
@@ -1 +1 @@
-{"version":3,"sources":["../../src/inference/llm.ts"],"sourcesContent":["…"], …
(requestOptions.extra_headers as Record<string, string>)\n : {};\n extraHeaders['X-LiveKit-Inference-Provider'] = this.provider;\n requestOptions.extra_headers = extraHeaders;\n }\n\n const stream = await this.client.chat.completions.create(\n {\n model: this.model,\n messages,\n tools,\n stream: true,\n stream_options: { include_usage: true },\n ...requestOptions,\n },\n {\n timeout: this.connOptions.timeoutMs,\n },\n );\n\n for await (const chunk of stream) {\n for (const choice of chunk.choices) {\n if (this.abortController.signal.aborted) {\n break;\n }\n const chatChunk = this.parseChoice(chunk.id, choice);\n if (chatChunk) {\n retryable = false;\n this.queue.put(chatChunk);\n }\n }\n\n if (chunk.usage) {\n const usage = chunk.usage;\n retryable = false;\n this.queue.put({\n id: chunk.id,\n usage: {\n completionTokens: usage.completion_tokens,\n promptTokens: usage.prompt_tokens,\n promptCachedTokens: usage.prompt_tokens_details?.cached_tokens || 0,\n totalTokens: usage.total_tokens,\n },\n });\n }\n }\n } catch (error) {\n if (error instanceof OpenAI.APIConnectionTimeoutError) {\n throw new APITimeoutError({ options: { retryable } });\n } else if (error instanceof OpenAI.APIError) {\n throw new APIStatusError({\n message: error.message,\n options: {\n statusCode: error.status,\n body: error.error,\n requestId: error.requestID,\n retryable,\n },\n });\n } else {\n throw new APIConnectionError({\n message: toError(error).message,\n options: { retryable },\n });\n }\n }\n }\n\n private parseChoice(\n id: string,\n choice: OpenAI.ChatCompletionChunk.Choice,\n ): llm.ChatChunk | undefined {\n const delta = choice.delta;\n\n // https://github.com/livekit/agents/issues/688\n // the delta can be None when using Azure OpenAI (content filtering)\n if (delta === undefined) return undefined;\n\n if (delta.tool_calls) {\n // check if we have functions to calls\n for (const tool of delta.tool_calls) {\n if (!tool.function) {\n continue; // oai may add other tools in the future\n }\n\n /**\n * The way OpenAI streams tool calls is a bit tricky.\n *\n * For any new tool call, it first emits a delta tool call with id, and function name,\n * the rest of the delta chunks will only stream the remaining arguments string,\n * until a new tool call is started or the tool call is finished.\n * See below for an example.\n *\n * Choice(delta=ChoiceDelta(content=None, function_call=None, refusal=None, role='assistant', tool_calls=None), finish_reason=None, index=0, logprobs=None)\n * [ChoiceDeltaToolCall(index=0, id='call_LaVeHWUHpef9K1sd5UO8TtLg', function=ChoiceDeltaToolCallFunction(arguments='', name='get_weather'), type='function')]\n * [ChoiceDeltaToolCall(index=0, id=None, function=ChoiceDeltaToolCallFunction(arguments='\\{\"location\": \"P', name=None), type=None)]\n * [ChoiceDeltaToolCall(index=0, id=None, function=ChoiceDeltaToolCallFunction(arguments='aris\\}', name=None), type=None)]\n * [ChoiceDeltaToolCall(index=1, id='call_ThU4OmMdQXnnVmpXGOCknXIB', function=ChoiceDeltaToolCallFunction(arguments='', name='get_weather'), type='function')]\n * [ChoiceDeltaToolCall(index=1, id=None, function=ChoiceDeltaToolCallFunction(arguments='\\{\"location\": \"T', name=None), type=None)]\n * [ChoiceDeltaToolCall(index=1, id=None, function=ChoiceDeltaToolCallFunction(arguments='okyo', name=None), type=None)]\n * Choice(delta=ChoiceDelta(content=None, function_call=None, refusal=None, role=None, tool_calls=None), finish_reason='tool_calls', index=0, logprobs=None)\n */\n let callChunk: llm.ChatChunk | undefined;\n 
// If we have a previous tool call and this is a new one, emit the previous\n if (this.toolCallId && tool.id && tool.index !== this.toolIndex) {\n callChunk = this.createRunningToolCallChunk(id, delta);\n this.toolCallId = this.fncName = this.fncRawArguments = undefined;\n this.toolExtra = undefined;\n }\n\n // Start or continue building the current tool call\n if (tool.function.name) {\n this.toolIndex = tool.index;\n this.toolCallId = tool.id;\n this.fncName = tool.function.name;\n this.fncRawArguments = tool.function.arguments || '';\n // Extract extra from tool call (e.g., Google thought signatures)\n this.toolExtra =\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n ((tool as any).extra_content as Record<string, unknown> | undefined) ?? undefined;\n } else if (tool.function.arguments) {\n this.fncRawArguments = (this.fncRawArguments || '') + tool.function.arguments;\n }\n\n if (callChunk) {\n return callChunk;\n }\n }\n }\n\n // If we're done with tool calls, emit the final one\n if (\n choice.finish_reason &&\n ['tool_calls', 'stop'].includes(choice.finish_reason) &&\n this.toolCallId !== undefined\n ) {\n const callChunk = this.createRunningToolCallChunk(id, delta);\n this.toolCallId = this.fncName = this.fncRawArguments = undefined;\n this.toolExtra = undefined;\n return callChunk;\n }\n\n // Extract extra from delta (e.g., Google thought signatures on text parts)\n const deltaExtra =\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n ((delta as any).extra_content as Record<string, unknown> | undefined) ?? undefined;\n\n // Regular content message\n if (!delta.content && !deltaExtra) {\n return undefined;\n }\n\n return {\n id,\n delta: {\n role: 'assistant',\n content: delta.content || undefined,\n extra: deltaExtra,\n },\n };\n }\n\n private createRunningToolCallChunk(\n id: string,\n delta: OpenAI.Chat.Completions.ChatCompletionChunk.Choice.Delta,\n ): llm.ChatChunk {\n const toolExtra = this.toolExtra ? { ...this.toolExtra } : {};\n const thoughtSignature = this.extractThoughtSignature(toolExtra);\n const deltaExtra =\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n ((delta as any).extra_content as Record<string, unknown> | undefined) ?? 
undefined;\n\n return {\n id,\n delta: {\n role: 'assistant',\n content: delta.content || undefined,\n extra: deltaExtra,\n toolCalls: [\n llm.FunctionCall.create({\n callId: this.toolCallId || '',\n name: this.fncName || '',\n args: this.fncRawArguments || '',\n extra: toolExtra,\n thoughtSignature,\n }),\n ],\n },\n };\n }\n\n private extractThoughtSignature(extra?: Record<string, unknown>): string | undefined {\n const googleExtra = extra?.google;\n if (googleExtra && typeof googleExtra === 'object') {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n return (googleExtra as any).thoughtSignature || (googleExtra as any).thought_signature;\n }\n return undefined;\n }\n}\n"],"mappings":"AAGA,OAAO,YAAY;AACnB;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,OACK;AACP,YAAY,SAAS;AAErB,SAAyB,yBAAyB;AAElD,MAAM,mBAAmB;AA4FlB,MAAM,YAAY,IAAI,IAAI;AAAA,EACvB;AAAA,EACA;AAAA,EAER,YAAY,MAQT;AACD,UAAM;AAEN,UAAM;AAAA,MACJ;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,mBAAmB;AAAA,IACrB,IAAI;AAEJ,UAAM,YAAY,WAAW,QAAQ,IAAI,yBAAyB;AAClE,UAAM,WAAW,UAAU,QAAQ,IAAI,6BAA6B,QAAQ,IAAI;AAChF,QAAI,CAAC,UAAU;AACb,YAAM,IAAI,MAAM,wDAAwD;AAAA,IAC1E;AAEA,UAAM,cACJ,aAAa,QAAQ,IAAI,gCAAgC,QAAQ,IAAI;AACvE,QAAI,CAAC,aAAa;AAChB,YAAM,IAAI,MAAM,iEAAiE;AAAA,IACnF;AAEA,SAAK,OAAO;AAAA,MACV;AAAA,MACA;AAAA,MACA,SAAS;AAAA,MACT,QAAQ;AAAA,MACR,WAAW;AAAA,MACX,cAAc,gBAAgB,CAAC;AAAA,MAC/B;AAAA,IACF;AAEA,SAAK,SAAS,IAAI,OAAO;AAAA,MACvB,SAAS,KAAK,KAAK;AAAA,MACnB,QAAQ;AAAA;AAAA,IACV,CAAC;AAAA,EACH;AAAA,EAEA,QAAgB;AACd,WAAO;AAAA,EACT;AAAA,EAEA,IAAI,QAAgB;AAClB,WAAO,KAAK,KAAK;AAAA,EACnB;AAAA,EAEA,OAAO,gBAAgB,aAA0B;AAC/C,WAAO,IAAI,IAAI,EAAE,OAAO,YAAY,CAAC;AAAA,EACvC;AAAA,EAEA,KAAK;AAAA,IACH;AAAA,IACA;AAAA,IACA,cAAc;AAAA,IACd;AAAA,IACA;AAAA;AAAA,IAEA;AAAA,EACF,GAQc;AACZ,QAAI,eAAwC,EAAE,GAAI,eAAe,CAAC,EAAG;AAErE,wBACE,sBAAsB,SAClB,oBACA,KAAK,KAAK,aAAa;AAE7B,QAAI,WAAW,OAAO,KAAK,OAAO,EAAE,SAAS,KAAK,sBAAsB,QAAW;AACjF,mBAAa,sBAAsB;AAAA,IACrC;AAEA,iBACE,eAAe,SACX,aACC,KAAK,KAAK,aAAa;AAE9B,QAAI,YAAY;AACd,mBAAa,cAAc;AAAA,IAC7B;AAIA,mBAAe,EAAE,GAAG,cAAc,GAAG,KAAK,KAAK,aAAa;AAE5D,WAAO,IAAI,UAAU,MAAM;AAAA,MACzB,OAAO,KAAK,KAAK;AAAA,MACjB,UAAU,KAAK,KAAK;AAAA,MACpB,QAAQ,KAAK;AAAA,MACb;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,kBAAkB,KAAK,KAAK,oBAAoB;AAAA;AAAA,MAChD,gBAAgB;AAAA,QACd,QAAQ,KAAK,KAAK;AAAA,QAClB,WAAW,KAAK,KAAK;AAAA,MACvB;AAAA,IACF,CAAC;AAAA,EACH;AACF;AAEO,MAAM,kBAAkB,IAAI,UAAU;AAAA,EACnC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAER,YACEA,MACA;AAAA,IACE;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAYA;AACA,UAAMA,MAAK,EAAE,SAAS,SAAS,YAAY,CAAC;AAC5C,SAAK,SAAS;AACd,SAAK,iBAAiB;AACtB,SAAK,WAAW;AAChB,SAAK,cAAc,eAAe;AAClC,SAAK,eAAe;AACpB,SAAK,QAAQ;AACb,SAAK,mBAAmB;AAAA,EAC1B;AAAA,EAEA,MAAgB,MAAqB;AA1RvC;AA6RI,QAAI,YAAY;AAChB,SAAK,aAAa,KAAK,UAAU,KAAK,kBAAkB,KAAK,YAAY;AACzE,SAAK,YAAY;AAEjB,QAAI;AACF,YAAM,WAAY,MAAM,KAAK,QAAQ;AAAA,QACnC,KAAK;AAAA,MACP;AAEA,YAAM,QAAQ,KAAK,UACf,OAAO,QAAQ,KAAK,OAAO,EAAE,IAAI,CAAC,CAAC,MAAM,IAAI,MAAM;AACjD,cAAM,YAAY;AAAA,UAChB,MAAM;AAAA,UACN,UAAU;AAAA,YACR;AAAA,YACA,aAAa,KAAK;AAAA,YAClB,YAAY,IAAI;AAAA,cACd,KAAK;AAAA,cACL;AAAA,cACA,KAAK;AAAA,YACP;AAAA,UACF;AAAA,QACF;AAEA,YAAI,KAAK,kBAAkB;AACzB,oBAAU,SAAS,SAAS;AAAA,QAC9B;AAEA,eAAO;AAAA,MACT,CAAC,IACD;AAEJ,YAAM,iBAA0C,EAAE,GAAG,KAAK,aAAa;AACvE,UAAI,CAAC,OAAO;AACV,eAAO,eAAe;AAAA,MACxB;AAGA,UAAI,KAAK,gBAAgB;AACvB,aAAK,OAAO,SAAS,MAAM;AAAA,UACzB,KAAK,eAAe;AAAA,UACpB,KAAK,eAAe;AAAA,QACtB;AAAA,MACF;AAEA,UAAI,KAAK,UAAU;AACjB,cAAM,
eAAe,eAAe,gBAC/B,eAAe,gBAChB,CAAC;AACL,qBAAa,8BAA8B,IAAI,KAAK;AACpD,uBAAe,gBAAgB;AAAA,MACjC;AAEA,YAAM,SAAS,MAAM,KAAK,OAAO,KAAK,YAAY;AAAA,QAChD;AAAA,UACE,OAAO,KAAK;AAAA,UACZ;AAAA,UACA;AAAA,UACA,QAAQ;AAAA,UACR,gBAAgB,EAAE,eAAe,KAAK;AAAA,UACtC,GAAG;AAAA,QACL;AAAA,QACA;AAAA,UACE,SAAS,KAAK,YAAY;AAAA,QAC5B;AAAA,MACF;AAEA,uBAAiB,SAAS,QAAQ;AAChC,mBAAW,UAAU,MAAM,SAAS;AAClC,cAAI,KAAK,gBAAgB,OAAO,SAAS;AACvC;AAAA,UACF;AACA,gBAAM,YAAY,KAAK,YAAY,MAAM,IAAI,MAAM;AACnD,cAAI,WAAW;AACb,wBAAY;AACZ,iBAAK,MAAM,IAAI,SAAS;AAAA,UAC1B;AAAA,QACF;AAEA,YAAI,MAAM,OAAO;AACf,gBAAM,QAAQ,MAAM;AACpB,sBAAY;AACZ,eAAK,MAAM,IAAI;AAAA,YACb,IAAI,MAAM;AAAA,YACV,OAAO;AAAA,cACL,kBAAkB,MAAM;AAAA,cACxB,cAAc,MAAM;AAAA,cACpB,sBAAoB,WAAM,0BAAN,mBAA6B,kBAAiB;AAAA,cAClE,aAAa,MAAM;AAAA,YACrB;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AAAA,IACF,SAAS,OAAO;AACd,UAAI,iBAAiB,OAAO,2BAA2B;AACrD,cAAM,IAAI,gBAAgB,EAAE,SAAS,EAAE,UAAU,EAAE,CAAC;AAAA,MACtD,WAAW,iBAAiB,OAAO,UAAU;AAC3C,cAAM,IAAI,eAAe;AAAA,UACvB,SAAS,MAAM;AAAA,UACf,SAAS;AAAA,YACP,YAAY,MAAM;AAAA,YAClB,MAAM,MAAM;AAAA,YACZ,WAAW,MAAM;AAAA,YACjB;AAAA,UACF;AAAA,QACF,CAAC;AAAA,MACH,OAAO;AACL,cAAM,IAAI,mBAAmB;AAAA,UAC3B,SAAS,QAAQ,KAAK,EAAE;AAAA,UACxB,SAAS,EAAE,UAAU;AAAA,QACvB,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AAAA,EAEQ,YACN,IACA,QAC2B;AAC3B,UAAM,QAAQ,OAAO;AAIrB,QAAI,UAAU,OAAW,QAAO;AAEhC,QAAI,MAAM,YAAY;AAEpB,iBAAW,QAAQ,MAAM,YAAY;AACnC,YAAI,CAAC,KAAK,UAAU;AAClB;AAAA,QACF;AAmBA,YAAI;AAEJ,YAAI,KAAK,cAAc,KAAK,MAAM,KAAK,UAAU,KAAK,WAAW;AAC/D,sBAAY,KAAK,2BAA2B,IAAI,KAAK;AACrD,eAAK,aAAa,KAAK,UAAU,KAAK,kBAAkB;AACxD,eAAK,YAAY;AAAA,QACnB;AAGA,YAAI,KAAK,SAAS,MAAM;AACtB,eAAK,YAAY,KAAK;AACtB,eAAK,aAAa,KAAK;AACvB,eAAK,UAAU,KAAK,SAAS;AAC7B,eAAK,kBAAkB,KAAK,SAAS,aAAa;AAElD,eAAK;AAAA,UAED,KAAa,iBAAyD;AAAA,QAC5E,WAAW,KAAK,SAAS,WAAW;AAClC,eAAK,mBAAmB,KAAK,mBAAmB,MAAM,KAAK,SAAS;AAAA,QACtE;AAEA,YAAI,WAAW;AACb,iBAAO;AAAA,QACT;AAAA,MACF;AAAA,IACF;AAGA,QACE,OAAO,iBACP,CAAC,cAAc,MAAM,EAAE,SAAS,OAAO,aAAa,KACpD,KAAK,eAAe,QACpB;AACA,YAAM,YAAY,KAAK,2BAA2B,IAAI,KAAK;AAC3D,WAAK,aAAa,KAAK,UAAU,KAAK,kBAAkB;AACxD,WAAK,YAAY;AACjB,aAAO;AAAA,IACT;AAGA,UAAM;AAAA;AAAA,MAEF,MAAc,iBAAyD;AAAA;AAG3E,QAAI,CAAC,MAAM,WAAW,CAAC,YAAY;AACjC,aAAO;AAAA,IACT;AAEA,WAAO;AAAA,MACL;AAAA,MACA,OAAO;AAAA,QACL,MAAM;AAAA,QACN,SAAS,MAAM,WAAW;AAAA,QAC1B,OAAO;AAAA,MACT;AAAA,IACF;AAAA,EACF;AAAA,EAEQ,2BACN,IACA,OACe;AACf,UAAM,YAAY,KAAK,YAAY,EAAE,GAAG,KAAK,UAAU,IAAI,CAAC;AAC5D,UAAM,mBAAmB,KAAK,wBAAwB,SAAS;AAC/D,UAAM;AAAA;AAAA,MAEF,MAAc,iBAAyD;AAAA;AAE3E,WAAO;AAAA,MACL;AAAA,MACA,OAAO;AAAA,QACL,MAAM;AAAA,QACN,SAAS,MAAM,WAAW;AAAA,QAC1B,OAAO;AAAA,QACP,WAAW;AAAA,UACT,IAAI,aAAa,OAAO;AAAA,YACtB,QAAQ,KAAK,cAAc;AAAA,YAC3B,MAAM,KAAK,WAAW;AAAA,YACtB,MAAM,KAAK,mBAAmB;AAAA,YAC9B,OAAO;AAAA,YACP;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA,EAEQ,wBAAwB,OAAqD;AACnF,UAAM,cAAc,+BAAO;AAC3B,QAAI,eAAe,OAAO,gBAAgB,UAAU;AAElD,aAAQ,YAAoB,oBAAqB,YAAoB;AAAA,IACvE;AACA,WAAO;AAAA,EACT;AACF;","names":["llm"]}
|
|
1
|
+
{"version":3,"sources":["../../src/inference/llm.ts"],"sourcesContent":["// SPDX-FileCopyrightText: 2025 LiveKit, Inc.\n//\n// SPDX-License-Identifier: Apache-2.0\nimport OpenAI from 'openai';\nimport {\n APIConnectionError,\n APIStatusError,\n APITimeoutError,\n DEFAULT_API_CONNECT_OPTIONS,\n type Expand,\n toError,\n} from '../index.js';\nimport * as llm from '../llm/index.js';\nimport type { APIConnectOptions } from '../types.js';\nimport { type AnyString, createAccessToken } from './utils.js';\n\nconst DEFAULT_BASE_URL = 'https://agent-gateway.livekit.cloud/v1';\n\nexport type OpenAIModels =\n | 'openai/gpt-5.2'\n | 'openai/gpt-5.2-chat-latest'\n | 'openai/gpt-5.1'\n | 'openai/gpt-5.1-chat-latest'\n | 'openai/gpt-5'\n | 'openai/gpt-5-mini'\n | 'openai/gpt-5-nano'\n | 'openai/gpt-4.1'\n | 'openai/gpt-4.1-mini'\n | 'openai/gpt-4.1-nano'\n | 'openai/gpt-4o'\n | 'openai/gpt-4o-mini'\n | 'openai/gpt-oss-120b';\n\nexport type GoogleModels =\n | 'google/gemini-3-pro'\n | 'google/gemini-3-flash'\n | 'google/gemini-2.5-pro'\n | 'google/gemini-2.5-flash'\n | 'google/gemini-2.5-flash-lite'\n | 'google/gemini-2.0-flash'\n | 'google/gemini-2.0-flash-lite';\n\nexport type MoonshotModels = 'moonshotai/kimi-k2-instruct';\n\nexport type DeepSeekModels = 'deepseek-ai/deepseek-v3' | 'deepseek-ai/deepseek-v3.2';\n\ntype ChatCompletionPredictionContentParam =\n Expand<OpenAI.Chat.Completions.ChatCompletionPredictionContent>;\ntype WebSearchOptions = Expand<OpenAI.Chat.Completions.ChatCompletionCreateParams.WebSearchOptions>;\ntype ToolChoice = Expand<OpenAI.Chat.Completions.ChatCompletionCreateParams['tool_choice']>;\ntype Verbosity = 'low' | 'medium' | 'high';\n\nexport interface ChatCompletionOptions extends Record<string, unknown> {\n frequency_penalty?: number;\n logit_bias?: Record<string, number>;\n logprobs?: boolean;\n max_completion_tokens?: number;\n max_tokens?: number;\n metadata?: Record<string, string>;\n modalities?: Array<'text' | 'audio'>;\n n?: number;\n parallel_tool_calls?: boolean;\n prediction?: ChatCompletionPredictionContentParam | null;\n presence_penalty?: number;\n prompt_cache_key?: string;\n reasoning_effort?: 'minimal' | 'low' | 'medium' | 'high';\n safety_identifier?: string;\n seed?: number;\n service_tier?: 'auto' | 'default' | 'flex' | 'scale' | 'priority';\n stop?: string | string[];\n store?: boolean;\n temperature?: number;\n top_logprobs?: number;\n top_p?: number;\n user?: string;\n verbosity?: Verbosity;\n web_search_options?: WebSearchOptions;\n\n // livekit-typed arguments\n tool_choice?: ToolChoice;\n // TODO(brian): support response format\n // response_format?: OpenAI.Chat.Completions.ChatCompletionCreateParams['response_format']\n}\n\nexport type LLMModels = OpenAIModels | GoogleModels | MoonshotModels | DeepSeekModels | AnyString;\n\nexport interface InferenceLLMOptions {\n model: LLMModels;\n provider?: string;\n baseURL: string;\n apiKey: string;\n apiSecret: string;\n modelOptions: ChatCompletionOptions;\n strictToolSchema?: boolean;\n}\n\nexport interface GatewayOptions {\n apiKey: string;\n apiSecret: string;\n}\n\n/**\n * Livekit Cloud Inference LLM\n */\nexport class LLM extends llm.LLM {\n private client: OpenAI;\n private opts: InferenceLLMOptions;\n\n constructor(opts: {\n model: LLMModels;\n provider?: string;\n baseURL?: string;\n apiKey?: string;\n apiSecret?: string;\n modelOptions?: InferenceLLMOptions['modelOptions'];\n strictToolSchema?: boolean;\n }) {\n super();\n\n const {\n model,\n provider,\n baseURL,\n apiKey,\n apiSecret,\n 
modelOptions,\n strictToolSchema = false,\n } = opts;\n\n const lkBaseURL = baseURL || process.env.LIVEKIT_INFERENCE_URL || DEFAULT_BASE_URL;\n const lkApiKey = apiKey || process.env.LIVEKIT_INFERENCE_API_KEY || process.env.LIVEKIT_API_KEY;\n if (!lkApiKey) {\n throw new Error('apiKey is required: pass apiKey or set LIVEKIT_API_KEY');\n }\n\n const lkApiSecret =\n apiSecret || process.env.LIVEKIT_INFERENCE_API_SECRET || process.env.LIVEKIT_API_SECRET;\n if (!lkApiSecret) {\n throw new Error('apiSecret is required: pass apiSecret or set LIVEKIT_API_SECRET');\n }\n\n this.opts = {\n model,\n provider,\n baseURL: lkBaseURL,\n apiKey: lkApiKey,\n apiSecret: lkApiSecret,\n modelOptions: modelOptions || {},\n strictToolSchema,\n };\n\n this.client = new OpenAI({\n baseURL: this.opts.baseURL,\n apiKey: '', // leave a temporary empty string to avoid OpenAI complain about missing key\n });\n }\n\n label(): string {\n return 'inference.LLM';\n }\n\n get model(): string {\n return this.opts.model;\n }\n\n static fromModelString(modelString: string): LLM {\n return new LLM({ model: modelString });\n }\n\n chat({\n chatCtx,\n toolCtx,\n connOptions = DEFAULT_API_CONNECT_OPTIONS,\n parallelToolCalls,\n toolChoice,\n // TODO(AJS-270): Add response_format parameter support\n extraKwargs,\n }: {\n chatCtx: llm.ChatContext;\n toolCtx?: llm.ToolContext;\n connOptions?: APIConnectOptions;\n parallelToolCalls?: boolean;\n toolChoice?: llm.ToolChoice;\n // TODO(AJS-270): Add responseFormat parameter\n extraKwargs?: Record<string, unknown>;\n }): LLMStream {\n let modelOptions: Record<string, unknown> = { ...(extraKwargs || {}) };\n\n parallelToolCalls =\n parallelToolCalls !== undefined\n ? parallelToolCalls\n : this.opts.modelOptions.parallel_tool_calls;\n\n if (toolCtx && Object.keys(toolCtx).length > 0 && parallelToolCalls !== undefined) {\n modelOptions.parallel_tool_calls = parallelToolCalls;\n }\n\n toolChoice =\n toolChoice !== undefined\n ? toolChoice\n : (this.opts.modelOptions.tool_choice as llm.ToolChoice | undefined);\n\n if (toolChoice) {\n modelOptions.tool_choice = toolChoice as ToolChoice;\n }\n\n // TODO(AJS-270): Add response_format support here\n\n modelOptions = { ...modelOptions, ...this.opts.modelOptions };\n\n return new LLMStream(this, {\n model: this.opts.model,\n provider: this.opts.provider,\n client: this.client,\n chatCtx,\n toolCtx,\n connOptions,\n modelOptions,\n strictToolSchema: this.opts.strictToolSchema ?? 
false, // default to false if not set\n gatewayOptions: {\n apiKey: this.opts.apiKey,\n apiSecret: this.opts.apiSecret,\n },\n });\n }\n}\n\nexport class LLMStream extends llm.LLMStream {\n private model: LLMModels;\n private provider?: string;\n private providerFmt: llm.ProviderFormat;\n private client: OpenAI;\n private modelOptions: Record<string, unknown>;\n private strictToolSchema: boolean;\n\n private gatewayOptions?: GatewayOptions;\n private toolCallId?: string;\n private toolIndex?: number;\n private fncName?: string;\n private fncRawArguments?: string;\n private toolExtra?: Record<string, unknown>;\n\n constructor(\n llm: LLM,\n {\n model,\n provider,\n client,\n chatCtx,\n toolCtx,\n gatewayOptions,\n connOptions,\n modelOptions,\n providerFmt,\n strictToolSchema,\n }: {\n model: LLMModels;\n provider?: string;\n client: OpenAI;\n chatCtx: llm.ChatContext;\n toolCtx?: llm.ToolContext;\n gatewayOptions?: GatewayOptions;\n connOptions: APIConnectOptions;\n modelOptions: Record<string, unknown>;\n providerFmt?: llm.ProviderFormat;\n strictToolSchema: boolean;\n },\n ) {\n super(llm, { chatCtx, toolCtx, connOptions });\n this.client = client;\n this.gatewayOptions = gatewayOptions;\n this.provider = provider;\n this.providerFmt = providerFmt || 'openai';\n this.modelOptions = modelOptions;\n this.model = model;\n this.strictToolSchema = strictToolSchema;\n }\n\n protected async run(): Promise<void> {\n // current function call that we're waiting for full completion (args are streamed)\n // (defined inside the run method to make sure the state is reset for each run/attempt)\n let retryable = true;\n this.toolCallId = this.fncName = this.fncRawArguments = this.toolIndex = undefined;\n this.toolExtra = undefined;\n\n try {\n const messages = (await this.chatCtx.toProviderFormat(\n this.providerFmt,\n )) as OpenAI.ChatCompletionMessageParam[];\n\n const tools = this.toolCtx\n ? Object.entries(this.toolCtx).map(([name, func]) => {\n const oaiParams = {\n type: 'function' as const,\n function: {\n name,\n description: func.description,\n parameters: llm.toJsonSchema(\n func.parameters,\n true,\n this.strictToolSchema,\n ) as unknown as OpenAI.Chat.Completions.ChatCompletionFunctionTool['function']['parameters'],\n } as OpenAI.Chat.Completions.ChatCompletionFunctionTool['function'],\n };\n\n if (this.strictToolSchema) {\n oaiParams.function.strict = true;\n }\n\n return oaiParams;\n })\n : undefined;\n\n const requestOptions: Record<string, unknown> = { ...this.modelOptions };\n if (!tools) {\n delete requestOptions.tool_choice;\n }\n\n // Dynamically set the access token for the LiveKit Agent Gateway API\n if (this.gatewayOptions) {\n this.client.apiKey = await createAccessToken(\n this.gatewayOptions.apiKey,\n this.gatewayOptions.apiSecret,\n );\n }\n\n if (this.provider) {\n const extraHeaders = requestOptions.extra_headers\n ? 
(requestOptions.extra_headers as Record<string, string>)\n : {};\n extraHeaders['X-LiveKit-Inference-Provider'] = this.provider;\n requestOptions.extra_headers = extraHeaders;\n }\n\n const stream = await this.client.chat.completions.create(\n {\n model: this.model,\n messages,\n tools,\n stream: true,\n stream_options: { include_usage: true },\n ...requestOptions,\n },\n {\n timeout: this.connOptions.timeoutMs,\n },\n );\n\n for await (const chunk of stream) {\n for (const choice of chunk.choices) {\n if (this.abortController.signal.aborted) {\n break;\n }\n const chatChunk = this.parseChoice(chunk.id, choice);\n if (chatChunk) {\n retryable = false;\n this.queue.put(chatChunk);\n }\n }\n\n if (chunk.usage) {\n const usage = chunk.usage;\n retryable = false;\n this.queue.put({\n id: chunk.id,\n usage: {\n completionTokens: usage.completion_tokens,\n promptTokens: usage.prompt_tokens,\n promptCachedTokens: usage.prompt_tokens_details?.cached_tokens || 0,\n totalTokens: usage.total_tokens,\n },\n });\n }\n }\n } catch (error) {\n if (error instanceof OpenAI.APIConnectionTimeoutError) {\n throw new APITimeoutError({ options: { retryable } });\n } else if (error instanceof OpenAI.APIError) {\n throw new APIStatusError({\n message: error.message,\n options: {\n statusCode: error.status,\n body: error.error,\n requestId: error.requestID,\n retryable,\n },\n });\n } else {\n throw new APIConnectionError({\n message: toError(error).message,\n options: { retryable },\n });\n }\n }\n }\n\n private parseChoice(\n id: string,\n choice: OpenAI.ChatCompletionChunk.Choice,\n ): llm.ChatChunk | undefined {\n const delta = choice.delta;\n\n // https://github.com/livekit/agents/issues/688\n // the delta can be None when using Azure OpenAI (content filtering)\n if (delta === undefined) return undefined;\n\n if (delta.tool_calls) {\n // check if we have functions to calls\n for (const tool of delta.tool_calls) {\n if (!tool.function) {\n continue; // oai may add other tools in the future\n }\n\n /**\n * The way OpenAI streams tool calls is a bit tricky.\n *\n * For any new tool call, it first emits a delta tool call with id, and function name,\n * the rest of the delta chunks will only stream the remaining arguments string,\n * until a new tool call is started or the tool call is finished.\n * See below for an example.\n *\n * Choice(delta=ChoiceDelta(content=None, function_call=None, refusal=None, role='assistant', tool_calls=None), finish_reason=None, index=0, logprobs=None)\n * [ChoiceDeltaToolCall(index=0, id='call_LaVeHWUHpef9K1sd5UO8TtLg', function=ChoiceDeltaToolCallFunction(arguments='', name='get_weather'), type='function')]\n * [ChoiceDeltaToolCall(index=0, id=None, function=ChoiceDeltaToolCallFunction(arguments='\\{\"location\": \"P', name=None), type=None)]\n * [ChoiceDeltaToolCall(index=0, id=None, function=ChoiceDeltaToolCallFunction(arguments='aris\\}', name=None), type=None)]\n * [ChoiceDeltaToolCall(index=1, id='call_ThU4OmMdQXnnVmpXGOCknXIB', function=ChoiceDeltaToolCallFunction(arguments='', name='get_weather'), type='function')]\n * [ChoiceDeltaToolCall(index=1, id=None, function=ChoiceDeltaToolCallFunction(arguments='\\{\"location\": \"T', name=None), type=None)]\n * [ChoiceDeltaToolCall(index=1, id=None, function=ChoiceDeltaToolCallFunction(arguments='okyo', name=None), type=None)]\n * Choice(delta=ChoiceDelta(content=None, function_call=None, refusal=None, role=None, tool_calls=None), finish_reason='tool_calls', index=0, logprobs=None)\n */\n let callChunk: llm.ChatChunk | undefined;\n 
// If we have a previous tool call and this is a new one, emit the previous\n if (this.toolCallId && tool.id && tool.index !== this.toolIndex) {\n callChunk = this.createRunningToolCallChunk(id, delta);\n this.toolCallId = this.fncName = this.fncRawArguments = undefined;\n // Note: We intentionally do NOT reset toolExtra here.\n // For Gemini 3+, the thought_signature is only provided on the first tool call\n // in a parallel batch, but must be applied to ALL tool calls in the batch.\n // We preserve toolExtra so subsequent tool calls inherit the thought_signature.\n }\n\n // Start or continue building the current tool call\n if (tool.function.name) {\n this.toolIndex = tool.index;\n this.toolCallId = tool.id;\n this.fncName = tool.function.name;\n this.fncRawArguments = tool.function.arguments || '';\n // Extract extra from tool call (e.g., Google thought signatures)\n // Only update toolExtra if this tool call has extra_content.\n // Otherwise, inherit from previous tool call (for parallel Gemini tool calls).\n const newToolExtra =\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n ((tool as any).extra_content as Record<string, unknown> | undefined) ?? undefined;\n if (newToolExtra) {\n this.toolExtra = newToolExtra;\n }\n } else if (tool.function.arguments) {\n this.fncRawArguments = (this.fncRawArguments || '') + tool.function.arguments;\n }\n\n if (callChunk) {\n return callChunk;\n }\n }\n }\n\n // If we're done with tool calls, emit the final one\n if (\n choice.finish_reason &&\n ['tool_calls', 'stop'].includes(choice.finish_reason) &&\n this.toolCallId !== undefined\n ) {\n const callChunk = this.createRunningToolCallChunk(id, delta);\n this.toolCallId = this.fncName = this.fncRawArguments = undefined;\n // Reset toolExtra at the end of the response (not between parallel tool calls)\n this.toolExtra = undefined;\n return callChunk;\n }\n\n // Extract extra from delta (e.g., Google thought signatures on text parts)\n const deltaExtra =\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n ((delta as any).extra_content as Record<string, unknown> | undefined) ?? undefined;\n\n // Regular content message\n if (!delta.content && !deltaExtra) {\n return undefined;\n }\n\n return {\n id,\n delta: {\n role: 'assistant',\n content: delta.content || undefined,\n extra: deltaExtra,\n },\n };\n }\n\n private createRunningToolCallChunk(\n id: string,\n delta: OpenAI.Chat.Completions.ChatCompletionChunk.Choice.Delta,\n ): llm.ChatChunk {\n const toolExtra = this.toolExtra ? { ...this.toolExtra } : {};\n const thoughtSignature = this.extractThoughtSignature(toolExtra);\n const deltaExtra =\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n ((delta as any).extra_content as Record<string, unknown> | undefined) ?? 
undefined;\n\n return {\n id,\n delta: {\n role: 'assistant',\n content: delta.content || undefined,\n extra: deltaExtra,\n toolCalls: [\n llm.FunctionCall.create({\n callId: this.toolCallId || '',\n name: this.fncName || '',\n args: this.fncRawArguments || '',\n extra: toolExtra,\n thoughtSignature,\n }),\n ],\n },\n };\n }\n\n private extractThoughtSignature(extra?: Record<string, unknown>): string | undefined {\n const googleExtra = extra?.google;\n if (googleExtra && typeof googleExtra === 'object') {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n return (googleExtra as any).thoughtSignature || (googleExtra as any).thought_signature;\n }\n return undefined;\n }\n}\n"],"mappings":"AAGA,OAAO,YAAY;AACnB;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,OACK;AACP,YAAY,SAAS;AAErB,SAAyB,yBAAyB;AAElD,MAAM,mBAAmB;AAwFlB,MAAM,YAAY,IAAI,IAAI;AAAA,EACvB;AAAA,EACA;AAAA,EAER,YAAY,MAQT;AACD,UAAM;AAEN,UAAM;AAAA,MACJ;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,mBAAmB;AAAA,IACrB,IAAI;AAEJ,UAAM,YAAY,WAAW,QAAQ,IAAI,yBAAyB;AAClE,UAAM,WAAW,UAAU,QAAQ,IAAI,6BAA6B,QAAQ,IAAI;AAChF,QAAI,CAAC,UAAU;AACb,YAAM,IAAI,MAAM,wDAAwD;AAAA,IAC1E;AAEA,UAAM,cACJ,aAAa,QAAQ,IAAI,gCAAgC,QAAQ,IAAI;AACvE,QAAI,CAAC,aAAa;AAChB,YAAM,IAAI,MAAM,iEAAiE;AAAA,IACnF;AAEA,SAAK,OAAO;AAAA,MACV;AAAA,MACA;AAAA,MACA,SAAS;AAAA,MACT,QAAQ;AAAA,MACR,WAAW;AAAA,MACX,cAAc,gBAAgB,CAAC;AAAA,MAC/B;AAAA,IACF;AAEA,SAAK,SAAS,IAAI,OAAO;AAAA,MACvB,SAAS,KAAK,KAAK;AAAA,MACnB,QAAQ;AAAA;AAAA,IACV,CAAC;AAAA,EACH;AAAA,EAEA,QAAgB;AACd,WAAO;AAAA,EACT;AAAA,EAEA,IAAI,QAAgB;AAClB,WAAO,KAAK,KAAK;AAAA,EACnB;AAAA,EAEA,OAAO,gBAAgB,aAA0B;AAC/C,WAAO,IAAI,IAAI,EAAE,OAAO,YAAY,CAAC;AAAA,EACvC;AAAA,EAEA,KAAK;AAAA,IACH;AAAA,IACA;AAAA,IACA,cAAc;AAAA,IACd;AAAA,IACA;AAAA;AAAA,IAEA;AAAA,EACF,GAQc;AACZ,QAAI,eAAwC,EAAE,GAAI,eAAe,CAAC,EAAG;AAErE,wBACE,sBAAsB,SAClB,oBACA,KAAK,KAAK,aAAa;AAE7B,QAAI,WAAW,OAAO,KAAK,OAAO,EAAE,SAAS,KAAK,sBAAsB,QAAW;AACjF,mBAAa,sBAAsB;AAAA,IACrC;AAEA,iBACE,eAAe,SACX,aACC,KAAK,KAAK,aAAa;AAE9B,QAAI,YAAY;AACd,mBAAa,cAAc;AAAA,IAC7B;AAIA,mBAAe,EAAE,GAAG,cAAc,GAAG,KAAK,KAAK,aAAa;AAE5D,WAAO,IAAI,UAAU,MAAM;AAAA,MACzB,OAAO,KAAK,KAAK;AAAA,MACjB,UAAU,KAAK,KAAK;AAAA,MACpB,QAAQ,KAAK;AAAA,MACb;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,kBAAkB,KAAK,KAAK,oBAAoB;AAAA;AAAA,MAChD,gBAAgB;AAAA,QACd,QAAQ,KAAK,KAAK;AAAA,QAClB,WAAW,KAAK,KAAK;AAAA,MACvB;AAAA,IACF,CAAC;AAAA,EACH;AACF;AAEO,MAAM,kBAAkB,IAAI,UAAU;AAAA,EACnC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAER,YACEA,MACA;AAAA,IACE;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAYA;AACA,UAAMA,MAAK,EAAE,SAAS,SAAS,YAAY,CAAC;AAC5C,SAAK,SAAS;AACd,SAAK,iBAAiB;AACtB,SAAK,WAAW;AAChB,SAAK,cAAc,eAAe;AAClC,SAAK,eAAe;AACpB,SAAK,QAAQ;AACb,SAAK,mBAAmB;AAAA,EAC1B;AAAA,EAEA,MAAgB,MAAqB;AAtRvC;AAyRI,QAAI,YAAY;AAChB,SAAK,aAAa,KAAK,UAAU,KAAK,kBAAkB,KAAK,YAAY;AACzE,SAAK,YAAY;AAEjB,QAAI;AACF,YAAM,WAAY,MAAM,KAAK,QAAQ;AAAA,QACnC,KAAK;AAAA,MACP;AAEA,YAAM,QAAQ,KAAK,UACf,OAAO,QAAQ,KAAK,OAAO,EAAE,IAAI,CAAC,CAAC,MAAM,IAAI,MAAM;AACjD,cAAM,YAAY;AAAA,UAChB,MAAM;AAAA,UACN,UAAU;AAAA,YACR;AAAA,YACA,aAAa,KAAK;AAAA,YAClB,YAAY,IAAI;AAAA,cACd,KAAK;AAAA,cACL;AAAA,cACA,KAAK;AAAA,YACP;AAAA,UACF;AAAA,QACF;AAEA,YAAI,KAAK,kBAAkB;AACzB,oBAAU,SAAS,SAAS;AAAA,QAC9B;AAEA,eAAO;AAAA,MACT,CAAC,IACD;AAEJ,YAAM,iBAA0C,EAAE,GAAG,KAAK,aAAa;AACvE,UAAI,CAAC,OAAO;AACV,eAAO,eAAe;AAAA,MACxB;AAGA,UAAI,KAAK,gBAAgB;AACvB,aAAK,OAAO,SAAS,MAAM;AAAA,UACzB,KAAK,eAAe;AAAA,UACpB,KAAK,eAAe;AAAA,QACtB;AAAA,MACF;AAEA,UAAI,KAAK,UAAU;AACjB,cAAM,
eAAe,eAAe,gBAC/B,eAAe,gBAChB,CAAC;AACL,qBAAa,8BAA8B,IAAI,KAAK;AACpD,uBAAe,gBAAgB;AAAA,MACjC;AAEA,YAAM,SAAS,MAAM,KAAK,OAAO,KAAK,YAAY;AAAA,QAChD;AAAA,UACE,OAAO,KAAK;AAAA,UACZ;AAAA,UACA;AAAA,UACA,QAAQ;AAAA,UACR,gBAAgB,EAAE,eAAe,KAAK;AAAA,UACtC,GAAG;AAAA,QACL;AAAA,QACA;AAAA,UACE,SAAS,KAAK,YAAY;AAAA,QAC5B;AAAA,MACF;AAEA,uBAAiB,SAAS,QAAQ;AAChC,mBAAW,UAAU,MAAM,SAAS;AAClC,cAAI,KAAK,gBAAgB,OAAO,SAAS;AACvC;AAAA,UACF;AACA,gBAAM,YAAY,KAAK,YAAY,MAAM,IAAI,MAAM;AACnD,cAAI,WAAW;AACb,wBAAY;AACZ,iBAAK,MAAM,IAAI,SAAS;AAAA,UAC1B;AAAA,QACF;AAEA,YAAI,MAAM,OAAO;AACf,gBAAM,QAAQ,MAAM;AACpB,sBAAY;AACZ,eAAK,MAAM,IAAI;AAAA,YACb,IAAI,MAAM;AAAA,YACV,OAAO;AAAA,cACL,kBAAkB,MAAM;AAAA,cACxB,cAAc,MAAM;AAAA,cACpB,sBAAoB,WAAM,0BAAN,mBAA6B,kBAAiB;AAAA,cAClE,aAAa,MAAM;AAAA,YACrB;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AAAA,IACF,SAAS,OAAO;AACd,UAAI,iBAAiB,OAAO,2BAA2B;AACrD,cAAM,IAAI,gBAAgB,EAAE,SAAS,EAAE,UAAU,EAAE,CAAC;AAAA,MACtD,WAAW,iBAAiB,OAAO,UAAU;AAC3C,cAAM,IAAI,eAAe;AAAA,UACvB,SAAS,MAAM;AAAA,UACf,SAAS;AAAA,YACP,YAAY,MAAM;AAAA,YAClB,MAAM,MAAM;AAAA,YACZ,WAAW,MAAM;AAAA,YACjB;AAAA,UACF;AAAA,QACF,CAAC;AAAA,MACH,OAAO;AACL,cAAM,IAAI,mBAAmB;AAAA,UAC3B,SAAS,QAAQ,KAAK,EAAE;AAAA,UACxB,SAAS,EAAE,UAAU;AAAA,QACvB,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AAAA,EAEQ,YACN,IACA,QAC2B;AAC3B,UAAM,QAAQ,OAAO;AAIrB,QAAI,UAAU,OAAW,QAAO;AAEhC,QAAI,MAAM,YAAY;AAEpB,iBAAW,QAAQ,MAAM,YAAY;AACnC,YAAI,CAAC,KAAK,UAAU;AAClB;AAAA,QACF;AAmBA,YAAI;AAEJ,YAAI,KAAK,cAAc,KAAK,MAAM,KAAK,UAAU,KAAK,WAAW;AAC/D,sBAAY,KAAK,2BAA2B,IAAI,KAAK;AACrD,eAAK,aAAa,KAAK,UAAU,KAAK,kBAAkB;AAAA,QAK1D;AAGA,YAAI,KAAK,SAAS,MAAM;AACtB,eAAK,YAAY,KAAK;AACtB,eAAK,aAAa,KAAK;AACvB,eAAK,UAAU,KAAK,SAAS;AAC7B,eAAK,kBAAkB,KAAK,SAAS,aAAa;AAIlD,gBAAM;AAAA;AAAA,YAEF,KAAa,iBAAyD;AAAA;AAC1E,cAAI,cAAc;AAChB,iBAAK,YAAY;AAAA,UACnB;AAAA,QACF,WAAW,KAAK,SAAS,WAAW;AAClC,eAAK,mBAAmB,KAAK,mBAAmB,MAAM,KAAK,SAAS;AAAA,QACtE;AAEA,YAAI,WAAW;AACb,iBAAO;AAAA,QACT;AAAA,MACF;AAAA,IACF;AAGA,QACE,OAAO,iBACP,CAAC,cAAc,MAAM,EAAE,SAAS,OAAO,aAAa,KACpD,KAAK,eAAe,QACpB;AACA,YAAM,YAAY,KAAK,2BAA2B,IAAI,KAAK;AAC3D,WAAK,aAAa,KAAK,UAAU,KAAK,kBAAkB;AAExD,WAAK,YAAY;AACjB,aAAO;AAAA,IACT;AAGA,UAAM;AAAA;AAAA,MAEF,MAAc,iBAAyD;AAAA;AAG3E,QAAI,CAAC,MAAM,WAAW,CAAC,YAAY;AACjC,aAAO;AAAA,IACT;AAEA,WAAO;AAAA,MACL;AAAA,MACA,OAAO;AAAA,QACL,MAAM;AAAA,QACN,SAAS,MAAM,WAAW;AAAA,QAC1B,OAAO;AAAA,MACT;AAAA,IACF;AAAA,EACF;AAAA,EAEQ,2BACN,IACA,OACe;AACf,UAAM,YAAY,KAAK,YAAY,EAAE,GAAG,KAAK,UAAU,IAAI,CAAC;AAC5D,UAAM,mBAAmB,KAAK,wBAAwB,SAAS;AAC/D,UAAM;AAAA;AAAA,MAEF,MAAc,iBAAyD;AAAA;AAE3E,WAAO;AAAA,MACL;AAAA,MACA,OAAO;AAAA,QACL,MAAM;AAAA,QACN,SAAS,MAAM,WAAW;AAAA,QAC1B,OAAO;AAAA,QACP,WAAW;AAAA,UACT,IAAI,aAAa,OAAO;AAAA,YACtB,QAAQ,KAAK,cAAc;AAAA,YAC3B,MAAM,KAAK,WAAW;AAAA,YACtB,MAAM,KAAK,mBAAmB;AAAA,YAC9B,OAAO;AAAA,YACP;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA,EAEQ,wBAAwB,OAAqD;AACnF,UAAM,cAAc,+BAAO;AAC3B,QAAI,eAAe,OAAO,gBAAgB,UAAU;AAElD,aAAQ,YAAoB,oBAAqB,YAAoB;AAAA,IACvE;AACA,WAAO;AAAA,EACT;AACF;","names":["llm"]}
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"sources":["../../src/inference/stt.ts"],"sourcesContent":["// SPDX-FileCopyrightText: 2025 LiveKit, Inc.\n//\n// SPDX-License-Identifier: Apache-2.0\nimport { type AudioFrame } from '@livekit/rtc-node';\nimport type { WebSocket } from 'ws';\nimport { APIError, APIStatusError } from '../_exceptions.js';\nimport { AudioByteStream } from '../audio.js';\nimport { log } from '../log.js';\nimport { createStreamChannel } from '../stream/stream_channel.js';\nimport {\n STT as BaseSTT,\n SpeechStream as BaseSpeechStream,\n type SpeechData,\n type SpeechEvent,\n SpeechEventType,\n} from '../stt/index.js';\nimport { type APIConnectOptions, DEFAULT_API_CONNECT_OPTIONS } from '../types.js';\nimport { type AudioBuffer, Event, Task, cancelAndWait, shortuuid, waitForAbort } from '../utils.js';\nimport type { TimedString } from '../voice/io.js';\nimport {\n type SttServerEvent,\n type SttTranscriptEvent,\n sttServerEventSchema,\n} from './api_protos.js';\nimport { type AnyString, connectWs, createAccessToken } from './utils.js';\n\nexport type DeepgramModels =\n | 'deepgram'\n | 'deepgram/nova-3'\n | 'deepgram/nova-3-general'\n | 'deepgram/nova-3-medical'\n | 'deepgram/nova-2-conversationalai'\n | 'deepgram/nova-2'\n | 'deepgram/nova-2-general'\n | 'deepgram/nova-2-medical'\n | 'deepgram/nova-2-phonecall';\n\nexport type CartesiaModels = 'cartesia' | 'cartesia/ink-whisper';\n\nexport type AssemblyaiModels = 'assemblyai' | 'assemblyai/universal-streaming';\n\nexport interface CartesiaOptions {\n min_volume?: number; // default: not specified\n max_silence_duration_secs?: number; // default: not specified\n}\n\nexport interface DeepgramOptions {\n filler_words?: boolean; // default: true\n interim_results?: boolean; // default: true\n endpointing?: number; // default: 25 (ms)\n punctuate?: boolean; // default: false\n smart_format?: boolean;\n keywords?: Array<[string, number]>;\n keyterms?: string[];\n profanity_filter?: boolean;\n numerals?: boolean;\n mip_opt_out?: boolean;\n}\n\nexport interface AssemblyAIOptions {\n format_turns?: boolean; // default: false\n end_of_turn_confidence_threshold?: number; // default: 0.01\n min_end_of_turn_silence_when_confident?: number; // default: 0\n max_turn_silence?: number; // default: not specified\n keyterms_prompt?: string[]; // default: not specified\n}\n\nexport type STTLanguages =\n | 'multi'\n | 'en'\n | 'de'\n | 'es'\n | 'fr'\n | 'ja'\n | 'pt'\n | 'zh'\n | 'hi'\n | AnyString;\n\ntype _STTModels = DeepgramModels | CartesiaModels | AssemblyaiModels;\n\nexport type STTModels = _STTModels | 'auto' | AnyString;\n\nexport type ModelWithLanguage = `${_STTModels}:${STTLanguages}` | STTModels;\n\nexport type STTOptions<TModel extends STTModels> = TModel extends DeepgramModels\n ? DeepgramOptions\n : TModel extends CartesiaModels\n ? CartesiaOptions\n : TModel extends AssemblyaiModels\n ? 
AssemblyAIOptions\n : Record<string, unknown>;\n\nexport type STTEncoding = 'pcm_s16le';\n\nconst DEFAULT_ENCODING: STTEncoding = 'pcm_s16le';\nconst DEFAULT_SAMPLE_RATE = 16000;\nconst DEFAULT_BASE_URL = 'wss://agent-gateway.livekit.cloud/v1';\nconst DEFAULT_CANCEL_TIMEOUT = 5000;\n\nexport interface InferenceSTTOptions<TModel extends STTModels> {\n model?: TModel;\n language?: STTLanguages;\n encoding: STTEncoding;\n sampleRate: number;\n baseURL: string;\n apiKey: string;\n apiSecret: string;\n modelOptions: STTOptions<TModel>;\n}\n\n/**\n * Livekit Cloud Inference STT\n */\nexport class STT<TModel extends STTModels> extends BaseSTT {\n private opts: InferenceSTTOptions<TModel>;\n private streams: Set<SpeechStream<TModel>> = new Set();\n\n #logger = log();\n\n constructor(opts?: {\n model?: TModel;\n language?: STTLanguages;\n baseURL?: string;\n encoding?: STTEncoding;\n sampleRate?: number;\n apiKey?: string;\n apiSecret?: string;\n modelOptions?: STTOptions<TModel>;\n }) {\n super({ streaming: true, interimResults: true, alignedTranscript: 'word' });\n\n const {\n model,\n language,\n baseURL,\n encoding = DEFAULT_ENCODING,\n sampleRate = DEFAULT_SAMPLE_RATE,\n apiKey,\n apiSecret,\n modelOptions = {} as STTOptions<TModel>,\n } = opts || {};\n\n const lkBaseURL = baseURL || process.env.LIVEKIT_INFERENCE_URL || DEFAULT_BASE_URL;\n const lkApiKey = apiKey || process.env.LIVEKIT_INFERENCE_API_KEY || process.env.LIVEKIT_API_KEY;\n if (!lkApiKey) {\n throw new Error('apiKey is required: pass apiKey or set LIVEKIT_API_KEY');\n }\n\n const lkApiSecret =\n apiSecret || process.env.LIVEKIT_INFERENCE_API_SECRET || process.env.LIVEKIT_API_SECRET;\n if (!lkApiSecret) {\n throw new Error('apiSecret is required: pass apiSecret or set LIVEKIT_API_SECRET');\n }\n\n this.opts = {\n model,\n language,\n encoding,\n sampleRate,\n baseURL: lkBaseURL,\n apiKey: lkApiKey,\n apiSecret: lkApiSecret,\n modelOptions,\n };\n }\n\n get label(): string {\n return 'inference.STT';\n }\n\n static fromModelString(modelString: string): STT<AnyString> {\n if (modelString.includes(':')) {\n const [model, language] = modelString.split(':') as [AnyString, STTLanguages];\n return new STT({ model, language });\n }\n return new STT({ model: modelString });\n }\n\n protected async _recognize(_: AudioBuffer): Promise<SpeechEvent> {\n throw new Error('LiveKit STT does not support batch recognition, use stream() instead');\n }\n\n updateOptions(opts: Partial<Pick<InferenceSTTOptions<TModel>, 'model' | 'language'>>): void {\n this.opts = { ...this.opts, ...opts };\n\n for (const stream of this.streams) {\n stream.updateOptions(opts);\n }\n }\n\n stream(options?: {\n language?: STTLanguages | string;\n connOptions?: APIConnectOptions;\n }): SpeechStream<TModel> {\n const { language, connOptions = DEFAULT_API_CONNECT_OPTIONS } = options || {};\n const streamOpts = {\n ...this.opts,\n language: language ?? 
this.opts.language,\n } as InferenceSTTOptions<TModel>;\n\n const stream = new SpeechStream(this, streamOpts, connOptions);\n this.streams.add(stream);\n\n return stream;\n }\n\n async connectWs(timeout: number): Promise<WebSocket> {\n const params = {\n settings: {\n sample_rate: String(this.opts.sampleRate),\n encoding: this.opts.encoding,\n extra: this.opts.modelOptions,\n },\n } as Record<string, unknown>;\n\n if (this.opts.model && this.opts.model !== 'auto') {\n params.model = this.opts.model;\n }\n\n if (this.opts.language) {\n (params.settings as Record<string, unknown>).language = this.opts.language;\n }\n\n let baseURL = this.opts.baseURL;\n if (baseURL.startsWith('http://') || baseURL.startsWith('https://')) {\n baseURL = baseURL.replace('http', 'ws');\n }\n\n const token = await createAccessToken(this.opts.apiKey, this.opts.apiSecret);\n const url = `${baseURL}/stt`;\n const headers = { Authorization: `Bearer ${token}` } as Record<string, string>;\n\n const socket = await connectWs(url, headers, timeout);\n const msg = { ...params, type: 'session.create' };\n socket.send(JSON.stringify(msg));\n\n return socket;\n }\n}\n\nexport class SpeechStream<TModel extends STTModels> extends BaseSpeechStream {\n private opts: InferenceSTTOptions<TModel>;\n private requestId = shortuuid('stt_request_');\n private speaking = false;\n private speechDuration = 0;\n private reconnectEvent = new Event();\n private stt: STT<TModel>;\n private connOptions: APIConnectOptions;\n\n #logger = log();\n\n constructor(\n sttImpl: STT<TModel>,\n opts: InferenceSTTOptions<TModel>,\n connOptions: APIConnectOptions,\n ) {\n super(sttImpl, opts.sampleRate, connOptions);\n this.opts = opts;\n this.stt = sttImpl;\n this.connOptions = connOptions;\n }\n\n get label(): string {\n return 'inference.SpeechStream';\n }\n\n updateOptions(opts: Partial<Pick<InferenceSTTOptions<TModel>, 'model' | 'language'>>): void {\n this.opts = { ...this.opts, ...opts };\n this.reconnectEvent.set();\n }\n\n protected async run(): Promise<void> {\n while (true) {\n // Create fresh resources for each connection attempt\n let ws: WebSocket | null = null;\n let closing = false;\n let finalReceived = false;\n\n const eventChannel = createStreamChannel<SttServerEvent>();\n\n const resourceCleanup = () => {\n if (closing) return;\n closing = true;\n eventChannel.close();\n ws?.removeAllListeners();\n ws?.close();\n };\n\n const createWsListener = async (ws: WebSocket, signal: AbortSignal) => {\n return new Promise<void>((resolve, reject) => {\n const onAbort = () => {\n resourceCleanup();\n reject(new Error('WebSocket connection aborted'));\n };\n\n signal.addEventListener('abort', onAbort, { once: true });\n\n ws.on('message', (data) => {\n const json = JSON.parse(data.toString()) as SttServerEvent;\n eventChannel.write(json);\n });\n\n ws.on('error', (e) => {\n this.#logger.error({ error: e }, 'WebSocket error');\n resourceCleanup();\n reject(e);\n });\n\n ws.on('close', (code: number) => {\n resourceCleanup();\n\n if (!closing) return this.#logger.error('WebSocket closed unexpectedly');\n if (finalReceived) return resolve();\n\n reject(\n new APIStatusError({\n message: 'LiveKit STT connection closed unexpectedly',\n options: { statusCode: code },\n }),\n );\n });\n });\n };\n\n const send = async (socket: WebSocket, signal: AbortSignal) => {\n const audioStream = new AudioByteStream(\n this.opts.sampleRate,\n 1,\n Math.floor(this.opts.sampleRate / 20), // 50ms\n );\n\n // Create abort promise once to avoid memory leak\n const 
abortPromise = new Promise<never>((_, reject) => {\n if (signal.aborted) {\n return reject(new Error('Send aborted'));\n }\n const onAbort = () => reject(new Error('Send aborted'));\n signal.addEventListener('abort', onAbort, { once: true });\n });\n\n // Manual iteration to support cancellation\n const iterator = this.input[Symbol.asyncIterator]();\n try {\n while (true) {\n const result = await Promise.race([iterator.next(), abortPromise]);\n\n if (result.done) break;\n const ev = result.value;\n\n let frames: AudioFrame[];\n if (ev === SpeechStream.FLUSH_SENTINEL) {\n frames = audioStream.flush();\n } else {\n const frame = ev as AudioFrame;\n frames = audioStream.write(new Int16Array(frame.data).buffer);\n }\n\n for (const frame of frames) {\n this.speechDuration += frame.samplesPerChannel / frame.sampleRate;\n const base64 = Buffer.from(frame.data.buffer).toString('base64');\n const msg = { type: 'input_audio', audio: base64 };\n socket.send(JSON.stringify(msg));\n }\n }\n\n closing = true;\n socket.send(JSON.stringify({ type: 'session.finalize' }));\n } catch (e) {\n if ((e as Error).message === 'Send aborted') {\n // Expected abort, don't log\n return;\n }\n throw e;\n }\n };\n\n const recv = async (signal: AbortSignal) => {\n const serverEventStream = eventChannel.stream();\n const reader = serverEventStream.getReader();\n\n try {\n while (!this.closed && !signal.aborted) {\n const result = await reader.read();\n if (signal.aborted) return;\n if (result.done) return;\n\n // Parse and validate with Zod schema\n const parseResult = await sttServerEventSchema.safeParseAsync(result.value);\n if (!parseResult.success) {\n this.#logger.warn(\n { error: parseResult.error, rawData: result.value },\n 'Failed to parse STT server event',\n );\n continue;\n }\n\n const event: SttServerEvent = parseResult.data;\n\n switch (event.type) {\n case 'session.created':\n case 'session.finalized':\n break;\n case 'session.closed':\n finalReceived = true;\n resourceCleanup();\n break;\n case 'interim_transcript':\n this.processTranscript(event, false);\n break;\n case 'final_transcript':\n this.processTranscript(event, true);\n break;\n case 'error':\n this.#logger.error({ error: event }, 'Received error from LiveKit STT');\n resourceCleanup();\n throw new APIError(`LiveKit STT returned error: ${JSON.stringify(event)}`);\n }\n }\n } finally {\n reader.releaseLock();\n try {\n await serverEventStream.cancel();\n } catch (e) {\n this.#logger.debug('Error cancelling serverEventStream (may already be cancelled):', e);\n }\n }\n };\n\n try {\n ws = await this.stt.connectWs(this.connOptions.timeoutMs);\n\n const controller = this.abortController; // Use base class abortController for proper cancellation\n const sendTask = Task.from(({ signal }) => send(ws!, signal), controller);\n const wsListenerTask = Task.from(({ signal }) => createWsListener(ws!, signal), controller);\n const recvTask = Task.from(({ signal }) => recv(signal), controller);\n const waitReconnectTask = Task.from(\n ({ signal }) => Promise.race([this.reconnectEvent.wait(), waitForAbort(signal)]),\n controller,\n );\n\n try {\n await Promise.race([\n Promise.all([sendTask.result, wsListenerTask.result, recvTask.result]),\n waitReconnectTask.result,\n ]);\n\n // If reconnect didn't trigger, tasks finished - exit loop\n if (!waitReconnectTask.done) break;\n\n // Reconnect triggered - clear event and continue loop\n this.reconnectEvent.clear();\n } finally {\n // Cancel all tasks to ensure cleanup\n await cancelAndWait(\n [sendTask, 
wsListenerTask, recvTask, waitReconnectTask],\n DEFAULT_CANCEL_TIMEOUT,\n );\n resourceCleanup();\n }\n } finally {\n // Ensure cleanup even if connectWs throws\n resourceCleanup();\n }\n }\n }\n\n private processTranscript(data: SttTranscriptEvent, isFinal: boolean) {\n // Check if queue is closed to avoid race condition during disconnect\n if (this.queue.closed) return;\n\n const requestId = data.session_id || this.requestId;\n const text = data.transcript;\n const language = data.language || this.opts.language || 'en';\n\n if (!text && !isFinal) return;\n\n try {\n // We'll have a more accurate way of detecting when speech started when we have VAD\n if (!this.speaking) {\n this.speaking = true;\n this.queue.put({ type: SpeechEventType.START_OF_SPEECH });\n }\n\n const speechData: SpeechData = {\n language,\n startTime: this.startTimeOffset + data.start,\n endTime: this.startTimeOffset + data.start + data.duration,\n confidence: data.confidence,\n text,\n words: data.words.map(\n (word): TimedString => ({\n text: word.word,\n startTime: word.start + this.startTimeOffset,\n endTime: word.end + this.startTimeOffset,\n startTimeOffset: this.startTimeOffset,\n confidence: word.confidence,\n }),\n ),\n };\n\n if (isFinal) {\n if (this.speechDuration > 0) {\n this.queue.put({\n type: SpeechEventType.RECOGNITION_USAGE,\n requestId,\n recognitionUsage: { audioDuration: this.speechDuration },\n });\n this.speechDuration = 0;\n }\n\n this.queue.put({\n type: SpeechEventType.FINAL_TRANSCRIPT,\n requestId,\n alternatives: [speechData],\n });\n\n if (this.speaking) {\n this.speaking = false;\n this.queue.put({ type: SpeechEventType.END_OF_SPEECH });\n }\n } else {\n this.queue.put({\n type: SpeechEventType.INTERIM_TRANSCRIPT,\n requestId,\n alternatives: [speechData],\n });\n }\n } catch (e) {\n if (e instanceof Error && e.message.includes('Queue is closed')) {\n // Expected behavior on disconnect, log as warning\n this.#logger.warn(\n { err: e },\n 'Queue closed during transcript processing (expected during disconnect)',\n );\n } else {\n this.#logger.error({ err: e }, 'Error putting transcript to queue');\n }\n }\n 
}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAGA,sBAAgC;AAEhC,wBAAyC;AACzC,mBAAgC;AAChC,iBAAoB;AACpB,4BAAoC;AACpC,iBAMO;AACP,mBAAoE;AACpE,mBAAsF;AAEtF,wBAIO;AACP,IAAAA,gBAA6D;AAuE7D,MAAM,mBAAgC;AACtC,MAAM,sBAAsB;AAC5B,MAAM,mBAAmB;AACzB,MAAM,yBAAyB;AAgBxB,MAAM,YAAsC,WAAAC,IAAQ;AAAA,EACjD;AAAA,EACA,UAAqC,oBAAI,IAAI;AAAA,EAErD,cAAU,gBAAI;AAAA,EAEd,YAAY,MAST;AACD,UAAM,EAAE,WAAW,MAAM,gBAAgB,MAAM,mBAAmB,OAAO,CAAC;AAE1E,UAAM;AAAA,MACJ;AAAA,MACA;AAAA,MACA;AAAA,MACA,WAAW;AAAA,MACX,aAAa;AAAA,MACb;AAAA,MACA;AAAA,MACA,eAAe,CAAC;AAAA,IAClB,IAAI,QAAQ,CAAC;AAEb,UAAM,YAAY,WAAW,QAAQ,IAAI,yBAAyB;AAClE,UAAM,WAAW,UAAU,QAAQ,IAAI,6BAA6B,QAAQ,IAAI;AAChF,QAAI,CAAC,UAAU;AACb,YAAM,IAAI,MAAM,wDAAwD;AAAA,IAC1E;AAEA,UAAM,cACJ,aAAa,QAAQ,IAAI,gCAAgC,QAAQ,IAAI;AACvE,QAAI,CAAC,aAAa;AAChB,YAAM,IAAI,MAAM,iEAAiE;AAAA,IACnF;AAEA,SAAK,OAAO;AAAA,MACV;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,SAAS;AAAA,MACT,QAAQ;AAAA,MACR,WAAW;AAAA,MACX;AAAA,IACF;AAAA,EACF;AAAA,EAEA,IAAI,QAAgB;AAClB,WAAO;AAAA,EACT;AAAA,EAEA,OAAO,gBAAgB,aAAqC;AAC1D,QAAI,YAAY,SAAS,GAAG,GAAG;AAC7B,YAAM,CAAC,OAAO,QAAQ,IAAI,YAAY,MAAM,GAAG;AAC/C,aAAO,IAAI,IAAI,EAAE,OAAO,SAAS,CAAC;AAAA,IACpC;AACA,WAAO,IAAI,IAAI,EAAE,OAAO,YAAY,CAAC;AAAA,EACvC;AAAA,EAEA,MAAgB,WAAW,GAAsC;AAC/D,UAAM,IAAI,MAAM,sEAAsE;AAAA,EACxF;AAAA,EAEA,cAAc,MAA8E;AAC1F,SAAK,OAAO,EAAE,GAAG,KAAK,MAAM,GAAG,KAAK;AAEpC,eAAW,UAAU,KAAK,SAAS;AACjC,aAAO,cAAc,IAAI;AAAA,IAC3B;AAAA,EACF;AAAA,EAEA,OAAO,SAGkB;AACvB,UAAM,EAAE,UAAU,cAAc,yCAA4B,IAAI,WAAW,CAAC;AAC5E,UAAM,aAAa;AAAA,MACjB,GAAG,KAAK;AAAA,MACR,UAAU,YAAY,KAAK,KAAK;AAAA,IAClC;AAEA,UAAM,SAAS,IAAI,aAAa,MAAM,YAAY,WAAW;AAC7D,SAAK,QAAQ,IAAI,MAAM;AAEvB,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,UAAU,SAAqC;AACnD,UAAM,SAAS;AAAA,MACb,UAAU;AAAA,QACR,aAAa,OAAO,KAAK,KAAK,UAAU;AAAA,QACxC,UAAU,KAAK,KAAK;AAAA,QACpB,OAAO,KAAK,KAAK;AAAA,MACnB;AAAA,IACF;AAEA,QAAI,KAAK,KAAK,SAAS,KAAK,KAAK,UAAU,QAAQ;AACjD,aAAO,QAAQ,KAAK,KAAK;AAAA,IAC3B;AAEA,QAAI,KAAK,KAAK,UAAU;AACtB,MAAC,OAAO,SAAqC,WAAW,KAAK,KAAK;AAAA,IACpE;AAEA,QAAI,UAAU,KAAK,KAAK;AACxB,QAAI,QAAQ,WAAW,SAAS,KAAK,QAAQ,WAAW,UAAU,GAAG;AACnE,gBAAU,QAAQ,QAAQ,QAAQ,IAAI;AAAA,IACxC;AAEA,UAAM,QAAQ,UAAM,iCAAkB,KAAK,KAAK,QAAQ,KAAK,KAAK,SAAS;AAC3E,UAAM,MAAM,GAAG,OAAO;AACtB,UAAM,UAAU,EAAE,eAAe,UAAU,KAAK,GAAG;AAEnD,UAAM,SAAS,UAAM,yBAAU,KAAK,SAAS,OAAO;AACpD,UAAM,MAAM,EAAE,GAAG,QAAQ,MAAM,iBAAiB;AAChD,WAAO,KAAK,KAAK,UAAU,GAAG,CAAC;AAE/B,WAAO;AAAA,EACT;AACF;AAEO,MAAM,qBAA+C,WAAAC,aAAiB;AAAA,EACnE;AAAA,EACA,gBAAY,wBAAU,cAAc;AAAA,EACpC,WAAW;AAAA,EACX,iBAAiB;AAAA,EACjB,iBAAiB,IAAI,mBAAM;AAAA,EAC3B;AAAA,EACA;AAAA,EAER,cAAU,gBAAI;AAAA,EAEd,YACE,SACA,MACA,aACA;AACA,UAAM,SAAS,KAAK,YAAY,WAAW;AAC3C,SAAK,OAAO;AACZ,SAAK,MAAM;AACX,SAAK,cAAc;AAAA,EACrB;AAAA,EAEA,IAAI,QAAgB;AAClB,WAAO;AAAA,EACT;AAAA,EAEA,cAAc,MAA8E;AAC1F,SAAK,OAAO,EAAE,GAAG,KAAK,MAAM,GAAG,KAAK;AACpC,SAAK,eAAe,IAAI;AAAA,EAC1B;AAAA,EAEA,MAAgB,MAAqB;AACnC,WAAO,MAAM;AAEX,UAAI,KAAuB;AAC3B,UAAI,UAAU;AACd,UAAI,gBAAgB;AAEpB,YAAM,mBAAe,2CAAoC;AAEzD,YAAM,kBAAkB,MAAM;AAC5B,YAAI,QAAS;AACb,kBAAU;AACV,qBAAa,MAAM;AACnB,iCAAI;AACJ,iCAAI;AAAA,MACN;AAEA,YAAM,mBAAmB,OAAOC,KAAe,WAAwB;AACrE,eAAO,IAAI,QAAc,CAAC,SAAS,WAAW;AAC5C,gBAAM,UAAU,MAAM;AACpB,4BAAgB;AAChB,mBAAO,IAAI,MAAM,8BAA8B,CAAC;AAAA,UAClD;AAEA,iBAAO,iBAAiB,SAAS,SAAS,EAAE,MAAM,KAAK,CAAC;AAExD,UAAAA,IAAG,GAAG,WAAW,CAAC,SAAS;AACzB,kBAAM,OAAO,KAAK,MAAM,KAAK,SAAS,CAAC;AACvC,yBAAa,MAAM,IAAI;AAAA,UACzB,CAAC;AAED,UAAAA,IAAG,GAAG,SAAS,CAAC,MAAM;AACpB,iBAAK,QAAQ,MAAM,EAAE,OAAO,EAAE,GAAG,iBAAiB;AAClD,4BAAgB;AAChB,mBAAO,CAAC;AAAA,UACV,CAAC;AAED,UAAAA,IAAG,GAAG,SAAS,CAAC,SAAiB;AAC/B,4BAAgB;AAEhB,gBAAI,CAAC,QAAS,QAAO,KAAK,QAAQ,MAAM,+BAA+B;AACvE,gBAAI,cAAe,QAAO,Q
AAQ;AAElC;AAAA,cACE,IAAI,iCAAe;AAAA,gBACjB,SAAS;AAAA,gBACT,SAAS,EAAE,YAAY,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAAA,UACF,CAAC;AAAA,QACH,CAAC;AAAA,MACH;AAEA,YAAM,OAAO,OAAO,QAAmB,WAAwB;AAC7D,cAAM,cAAc,IAAI;AAAA,UACtB,KAAK,KAAK;AAAA,UACV;AAAA,UACA,KAAK,MAAM,KAAK,KAAK,aAAa,EAAE;AAAA;AAAA,QACtC;AAGA,cAAM,eAAe,IAAI,QAAe,CAAC,GAAG,WAAW;AACrD,cAAI,OAAO,SAAS;AAClB,mBAAO,OAAO,IAAI,MAAM,cAAc,CAAC;AAAA,UACzC;AACA,gBAAM,UAAU,MAAM,OAAO,IAAI,MAAM,cAAc,CAAC;AACtD,iBAAO,iBAAiB,SAAS,SAAS,EAAE,MAAM,KAAK,CAAC;AAAA,QAC1D,CAAC;AAGD,cAAM,WAAW,KAAK,MAAM,OAAO,aAAa,EAAE;AAClD,YAAI;AACF,iBAAO,MAAM;AACX,kBAAM,SAAS,MAAM,QAAQ,KAAK,CAAC,SAAS,KAAK,GAAG,YAAY,CAAC;AAEjE,gBAAI,OAAO,KAAM;AACjB,kBAAM,KAAK,OAAO;AAElB,gBAAI;AACJ,gBAAI,OAAO,aAAa,gBAAgB;AACtC,uBAAS,YAAY,MAAM;AAAA,YAC7B,OAAO;AACL,oBAAM,QAAQ;AACd,uBAAS,YAAY,MAAM,IAAI,WAAW,MAAM,IAAI,EAAE,MAAM;AAAA,YAC9D;AAEA,uBAAW,SAAS,QAAQ;AAC1B,mBAAK,kBAAkB,MAAM,oBAAoB,MAAM;AACvD,oBAAM,SAAS,OAAO,KAAK,MAAM,KAAK,MAAM,EAAE,SAAS,QAAQ;AAC/D,oBAAM,MAAM,EAAE,MAAM,eAAe,OAAO,OAAO;AACjD,qBAAO,KAAK,KAAK,UAAU,GAAG,CAAC;AAAA,YACjC;AAAA,UACF;AAEA,oBAAU;AACV,iBAAO,KAAK,KAAK,UAAU,EAAE,MAAM,mBAAmB,CAAC,CAAC;AAAA,QAC1D,SAAS,GAAG;AACV,cAAK,EAAY,YAAY,gBAAgB;AAE3C;AAAA,UACF;AACA,gBAAM;AAAA,QACR;AAAA,MACF;AAEA,YAAM,OAAO,OAAO,WAAwB;AAC1C,cAAM,oBAAoB,aAAa,OAAO;AAC9C,cAAM,SAAS,kBAAkB,UAAU;AAE3C,YAAI;AACF,iBAAO,CAAC,KAAK,UAAU,CAAC,OAAO,SAAS;AACtC,kBAAM,SAAS,MAAM,OAAO,KAAK;AACjC,gBAAI,OAAO,QAAS;AACpB,gBAAI,OAAO,KAAM;AAGjB,kBAAM,cAAc,MAAM,uCAAqB,eAAe,OAAO,KAAK;AAC1E,gBAAI,CAAC,YAAY,SAAS;AACxB,mBAAK,QAAQ;AAAA,gBACX,EAAE,OAAO,YAAY,OAAO,SAAS,OAAO,MAAM;AAAA,gBAClD;AAAA,cACF;AACA;AAAA,YACF;AAEA,kBAAM,QAAwB,YAAY;AAE1C,oBAAQ,MAAM,MAAM;AAAA,cAClB,KAAK;AAAA,cACL,KAAK;AACH;AAAA,cACF,KAAK;AACH,gCAAgB;AAChB,gCAAgB;AAChB;AAAA,cACF,KAAK;AACH,qBAAK,kBAAkB,OAAO,KAAK;AACnC;AAAA,cACF,KAAK;AACH,qBAAK,kBAAkB,OAAO,IAAI;AAClC;AAAA,cACF,KAAK;AACH,qBAAK,QAAQ,MAAM,EAAE,OAAO,MAAM,GAAG,iCAAiC;AACtE,gCAAgB;AAChB,sBAAM,IAAI,2BAAS,+BAA+B,KAAK,UAAU,KAAK,CAAC,EAAE;AAAA,YAC7E;AAAA,UACF;AAAA,QACF,UAAE;AACA,iBAAO,YAAY;AACnB,cAAI;AACF,kBAAM,kBAAkB,OAAO;AAAA,UACjC,SAAS,GAAG;AACV,iBAAK,QAAQ,MAAM,kEAAkE,CAAC;AAAA,UACxF;AAAA,QACF;AAAA,MACF;AAEA,UAAI;AACF,aAAK,MAAM,KAAK,IAAI,UAAU,KAAK,YAAY,SAAS;AAExD,cAAM,aAAa,KAAK;AACxB,cAAM,WAAW,kBAAK,KAAK,CAAC,EAAE,OAAO,MAAM,KAAK,IAAK,MAAM,GAAG,UAAU;AACxE,cAAM,iBAAiB,kBAAK,KAAK,CAAC,EAAE,OAAO,MAAM,iBAAiB,IAAK,MAAM,GAAG,UAAU;AAC1F,cAAM,WAAW,kBAAK,KAAK,CAAC,EAAE,OAAO,MAAM,KAAK,MAAM,GAAG,UAAU;AACnE,cAAM,oBAAoB,kBAAK;AAAA,UAC7B,CAAC,EAAE,OAAO,MAAM,QAAQ,KAAK,CAAC,KAAK,eAAe,KAAK,OAAG,2BAAa,MAAM,CAAC,CAAC;AAAA,UAC/E;AAAA,QACF;AAEA,YAAI;AACF,gBAAM,QAAQ,KAAK;AAAA,YACjB,QAAQ,IAAI,CAAC,SAAS,QAAQ,eAAe,QAAQ,SAAS,MAAM,CAAC;AAAA,YACrE,kBAAkB;AAAA,UACpB,CAAC;AAGD,cAAI,CAAC,kBAAkB,KAAM;AAG7B,eAAK,eAAe,MAAM;AAAA,QAC5B,UAAE;AAEA,oBAAM;AAAA,YACJ,CAAC,UAAU,gBAAgB,UAAU,iBAAiB;AAAA,YACtD;AAAA,UACF;AACA,0BAAgB;AAAA,QAClB;AAAA,MACF,UAAE;AAEA,wBAAgB;AAAA,MAClB;AAAA,IACF;AAAA,EACF;AAAA,EAEQ,kBAAkB,MAA0B,SAAkB;AAEpE,QAAI,KAAK,MAAM,OAAQ;AAEvB,UAAM,YAAY,KAAK,cAAc,KAAK;AAC1C,UAAM,OAAO,KAAK;AAClB,UAAM,WAAW,KAAK,YAAY,KAAK,KAAK,YAAY;AAExD,QAAI,CAAC,QAAQ,CAAC,QAAS;AAEvB,QAAI;AAEF,UAAI,CAAC,KAAK,UAAU;AAClB,aAAK,WAAW;AAChB,aAAK,MAAM,IAAI,EAAE,MAAM,2BAAgB,gBAAgB,CAAC;AAAA,MAC1D;AAEA,YAAM,aAAyB;AAAA,QAC7B;AAAA,QACA,WAAW,KAAK,kBAAkB,KAAK;AAAA,QACvC,SAAS,KAAK,kBAAkB,KAAK,QAAQ,KAAK;AAAA,QAClD,YAAY,KAAK;AAAA,QACjB;AAAA,QACA,OAAO,KAAK,MAAM;AAAA,UAChB,CAAC,UAAuB;AAAA,YACtB,MAAM,KAAK;AAAA,YACX,WAAW,KAAK,QAAQ,KAAK;AAAA,YAC7B,SAAS,KAAK,MAAM,KAAK;AAAA,YACzB,iBAAiB,KAAK;AAAA,YACtB,YAAY,KAAK;AAAA,UACnB;AAAA,QACF;AAAA,MACF;AAEA,UAAI,SAAS;AACX,YAAI,KAAK,iBAAiB,GAAG;AAC3B
,eAAK,MAAM,IAAI;AAAA,YACb,MAAM,2BAAgB;AAAA,YACtB;AAAA,YACA,kBAAkB,EAAE,eAAe,KAAK,eAAe;AAAA,UACzD,CAAC;AACD,eAAK,iBAAiB;AAAA,QACxB;AAEA,aAAK,MAAM,IAAI;AAAA,UACb,MAAM,2BAAgB;AAAA,UACtB;AAAA,UACA,cAAc,CAAC,UAAU;AAAA,QAC3B,CAAC;AAED,YAAI,KAAK,UAAU;AACjB,eAAK,WAAW;AAChB,eAAK,MAAM,IAAI,EAAE,MAAM,2BAAgB,cAAc,CAAC;AAAA,QACxD;AAAA,MACF,OAAO;AACL,aAAK,MAAM,IAAI;AAAA,UACb,MAAM,2BAAgB;AAAA,UACtB;AAAA,UACA,cAAc,CAAC,UAAU;AAAA,QAC3B,CAAC;AAAA,MACH;AAAA,IACF,SAAS,GAAG;AACV,UAAI,aAAa,SAAS,EAAE,QAAQ,SAAS,iBAAiB,GAAG;AAE/D,aAAK,QAAQ;AAAA,UACX,EAAE,KAAK,EAAE;AAAA,UACT;AAAA,QACF;AAAA,MACF,OAAO;AACL,aAAK,QAAQ,MAAM,EAAE,KAAK,EAAE,GAAG,mCAAmC;AAAA,MACpE;AAAA,IACF;AAAA,EACF;AACF;","names":["import_utils","BaseSTT","BaseSpeechStream","ws"]}
+
{"version":3,"sources":["../../src/inference/stt.ts"],"sourcesContent":["// SPDX-FileCopyrightText: 2025 LiveKit, Inc.\n//\n// SPDX-License-Identifier: Apache-2.0\nimport { type AudioFrame } from '@livekit/rtc-node';\nimport type { WebSocket } from 'ws';\nimport { APIError, APIStatusError } from '../_exceptions.js';\nimport { AudioByteStream } from '../audio.js';\nimport { log } from '../log.js';\nimport { createStreamChannel } from '../stream/stream_channel.js';\nimport {\n STT as BaseSTT,\n SpeechStream as BaseSpeechStream,\n type SpeechData,\n type SpeechEvent,\n SpeechEventType,\n} from '../stt/index.js';\nimport { type APIConnectOptions, DEFAULT_API_CONNECT_OPTIONS } from '../types.js';\nimport { type AudioBuffer, Event, Task, cancelAndWait, shortuuid, waitForAbort } from '../utils.js';\nimport type { TimedString } from '../voice/io.js';\nimport {\n type SttServerEvent,\n type SttTranscriptEvent,\n sttServerEventSchema,\n} from './api_protos.js';\nimport { type AnyString, connectWs, createAccessToken } from './utils.js';\n\nexport type DeepgramModels =\n | 'deepgram/flux-general'\n | 'deepgram/nova-3'\n | 'deepgram/nova-3-medical'\n | 'deepgram/nova-2'\n | 'deepgram/nova-2-medical'\n | 'deepgram/nova-2-conversationalai'\n | 'deepgram/nova-2-phonecall';\n\nexport type CartesiaModels = 'cartesia/ink-whisper';\n\nexport type AssemblyaiModels =\n | 'assemblyai/universal-streaming'\n | 'assemblyai/universal-streaming-multilingual';\n\nexport type ElevenlabsSTTModels = 'elevenlabs/scribe_v2_realtime';\n\nexport interface CartesiaOptions {\n min_volume?: number; // default: not specified\n max_silence_duration_secs?: number; // default: not specified\n}\n\nexport interface DeepgramOptions {\n filler_words?: boolean; // default: true\n interim_results?: boolean; // default: true\n endpointing?: number; // default: 25 (ms)\n punctuate?: boolean; // default: false\n smart_format?: boolean;\n keywords?: Array<[string, number]>;\n keyterms?: string[];\n profanity_filter?: boolean;\n numerals?: boolean;\n mip_opt_out?: boolean;\n}\n\nexport interface AssemblyAIOptions {\n format_turns?: boolean; // default: false\n end_of_turn_confidence_threshold?: number; // default: 0.01\n min_end_of_turn_silence_when_confident?: number; // default: 0\n max_turn_silence?: number; // default: not specified\n keyterms_prompt?: string[]; // default: not specified\n}\n\nexport type STTLanguages =\n | 'multi'\n | 'en'\n | 'de'\n | 'es'\n | 'fr'\n | 'ja'\n | 'pt'\n | 'zh'\n | 'hi'\n | AnyString;\n\ntype _STTModels = DeepgramModels | CartesiaModels | AssemblyaiModels | ElevenlabsSTTModels;\n\nexport type STTModels = _STTModels | 'auto' | AnyString;\n\nexport type ModelWithLanguage = `${_STTModels}:${STTLanguages}` | STTModels;\n\nexport type STTOptions<TModel extends STTModels> = TModel extends DeepgramModels\n ? DeepgramOptions\n : TModel extends CartesiaModels\n ? CartesiaOptions\n : TModel extends AssemblyaiModels\n ? 
AssemblyAIOptions\n : Record<string, unknown>;\n\nexport type STTEncoding = 'pcm_s16le';\n\nconst DEFAULT_ENCODING: STTEncoding = 'pcm_s16le';\nconst DEFAULT_SAMPLE_RATE = 16000;\nconst DEFAULT_BASE_URL = 'wss://agent-gateway.livekit.cloud/v1';\nconst DEFAULT_CANCEL_TIMEOUT = 5000;\n\nexport interface InferenceSTTOptions<TModel extends STTModels> {\n model?: TModel;\n language?: STTLanguages;\n encoding: STTEncoding;\n sampleRate: number;\n baseURL: string;\n apiKey: string;\n apiSecret: string;\n modelOptions: STTOptions<TModel>;\n}\n\n/**\n * Livekit Cloud Inference STT\n */\nexport class STT<TModel extends STTModels> extends BaseSTT {\n private opts: InferenceSTTOptions<TModel>;\n private streams: Set<SpeechStream<TModel>> = new Set();\n\n #logger = log();\n\n constructor(opts?: {\n model?: TModel;\n language?: STTLanguages;\n baseURL?: string;\n encoding?: STTEncoding;\n sampleRate?: number;\n apiKey?: string;\n apiSecret?: string;\n modelOptions?: STTOptions<TModel>;\n }) {\n super({ streaming: true, interimResults: true, alignedTranscript: 'word' });\n\n const {\n model,\n language,\n baseURL,\n encoding = DEFAULT_ENCODING,\n sampleRate = DEFAULT_SAMPLE_RATE,\n apiKey,\n apiSecret,\n modelOptions = {} as STTOptions<TModel>,\n } = opts || {};\n\n const lkBaseURL = baseURL || process.env.LIVEKIT_INFERENCE_URL || DEFAULT_BASE_URL;\n const lkApiKey = apiKey || process.env.LIVEKIT_INFERENCE_API_KEY || process.env.LIVEKIT_API_KEY;\n if (!lkApiKey) {\n throw new Error('apiKey is required: pass apiKey or set LIVEKIT_API_KEY');\n }\n\n const lkApiSecret =\n apiSecret || process.env.LIVEKIT_INFERENCE_API_SECRET || process.env.LIVEKIT_API_SECRET;\n if (!lkApiSecret) {\n throw new Error('apiSecret is required: pass apiSecret or set LIVEKIT_API_SECRET');\n }\n\n this.opts = {\n model,\n language,\n encoding,\n sampleRate,\n baseURL: lkBaseURL,\n apiKey: lkApiKey,\n apiSecret: lkApiSecret,\n modelOptions,\n };\n }\n\n get label(): string {\n return 'inference.STT';\n }\n\n static fromModelString(modelString: string): STT<AnyString> {\n if (modelString.includes(':')) {\n const [model, language] = modelString.split(':') as [AnyString, STTLanguages];\n return new STT({ model, language });\n }\n return new STT({ model: modelString });\n }\n\n protected async _recognize(_: AudioBuffer): Promise<SpeechEvent> {\n throw new Error('LiveKit STT does not support batch recognition, use stream() instead');\n }\n\n updateOptions(opts: Partial<Pick<InferenceSTTOptions<TModel>, 'model' | 'language'>>): void {\n this.opts = { ...this.opts, ...opts };\n\n for (const stream of this.streams) {\n stream.updateOptions(opts);\n }\n }\n\n stream(options?: {\n language?: STTLanguages | string;\n connOptions?: APIConnectOptions;\n }): SpeechStream<TModel> {\n const { language, connOptions = DEFAULT_API_CONNECT_OPTIONS } = options || {};\n const streamOpts = {\n ...this.opts,\n language: language ?? 
this.opts.language,\n } as InferenceSTTOptions<TModel>;\n\n const stream = new SpeechStream(this, streamOpts, connOptions);\n this.streams.add(stream);\n\n return stream;\n }\n\n async connectWs(timeout: number): Promise<WebSocket> {\n const params = {\n settings: {\n sample_rate: String(this.opts.sampleRate),\n encoding: this.opts.encoding,\n extra: this.opts.modelOptions,\n },\n } as Record<string, unknown>;\n\n if (this.opts.model && this.opts.model !== 'auto') {\n params.model = this.opts.model;\n }\n\n if (this.opts.language) {\n (params.settings as Record<string, unknown>).language = this.opts.language;\n }\n\n let baseURL = this.opts.baseURL;\n if (baseURL.startsWith('http://') || baseURL.startsWith('https://')) {\n baseURL = baseURL.replace('http', 'ws');\n }\n\n const token = await createAccessToken(this.opts.apiKey, this.opts.apiSecret);\n const url = `${baseURL}/stt`;\n const headers = { Authorization: `Bearer ${token}` } as Record<string, string>;\n\n const socket = await connectWs(url, headers, timeout);\n const msg = { ...params, type: 'session.create' };\n socket.send(JSON.stringify(msg));\n\n return socket;\n }\n}\n\nexport class SpeechStream<TModel extends STTModels> extends BaseSpeechStream {\n private opts: InferenceSTTOptions<TModel>;\n private requestId = shortuuid('stt_request_');\n private speaking = false;\n private speechDuration = 0;\n private reconnectEvent = new Event();\n private stt: STT<TModel>;\n private connOptions: APIConnectOptions;\n\n #logger = log();\n\n constructor(\n sttImpl: STT<TModel>,\n opts: InferenceSTTOptions<TModel>,\n connOptions: APIConnectOptions,\n ) {\n super(sttImpl, opts.sampleRate, connOptions);\n this.opts = opts;\n this.stt = sttImpl;\n this.connOptions = connOptions;\n }\n\n get label(): string {\n return 'inference.SpeechStream';\n }\n\n updateOptions(opts: Partial<Pick<InferenceSTTOptions<TModel>, 'model' | 'language'>>): void {\n this.opts = { ...this.opts, ...opts };\n this.reconnectEvent.set();\n }\n\n protected async run(): Promise<void> {\n while (true) {\n // Create fresh resources for each connection attempt\n let ws: WebSocket | null = null;\n let closing = false;\n let finalReceived = false;\n\n const eventChannel = createStreamChannel<SttServerEvent>();\n\n const resourceCleanup = () => {\n if (closing) return;\n closing = true;\n eventChannel.close();\n ws?.removeAllListeners();\n ws?.close();\n };\n\n const createWsListener = async (ws: WebSocket, signal: AbortSignal) => {\n return new Promise<void>((resolve, reject) => {\n const onAbort = () => {\n resourceCleanup();\n reject(new Error('WebSocket connection aborted'));\n };\n\n signal.addEventListener('abort', onAbort, { once: true });\n\n ws.on('message', (data) => {\n const json = JSON.parse(data.toString()) as SttServerEvent;\n eventChannel.write(json);\n });\n\n ws.on('error', (e) => {\n this.#logger.error({ error: e }, 'WebSocket error');\n resourceCleanup();\n reject(e);\n });\n\n ws.on('close', (code: number) => {\n resourceCleanup();\n\n if (!closing) return this.#logger.error('WebSocket closed unexpectedly');\n if (finalReceived) return resolve();\n\n reject(\n new APIStatusError({\n message: 'LiveKit STT connection closed unexpectedly',\n options: { statusCode: code },\n }),\n );\n });\n });\n };\n\n const send = async (socket: WebSocket, signal: AbortSignal) => {\n const audioStream = new AudioByteStream(\n this.opts.sampleRate,\n 1,\n Math.floor(this.opts.sampleRate / 20), // 50ms\n );\n\n // Create abort promise once to avoid memory leak\n const 
abortPromise = new Promise<never>((_, reject) => {\n if (signal.aborted) {\n return reject(new Error('Send aborted'));\n }\n const onAbort = () => reject(new Error('Send aborted'));\n signal.addEventListener('abort', onAbort, { once: true });\n });\n\n // Manual iteration to support cancellation\n const iterator = this.input[Symbol.asyncIterator]();\n try {\n while (true) {\n const result = await Promise.race([iterator.next(), abortPromise]);\n\n if (result.done) break;\n const ev = result.value;\n\n let frames: AudioFrame[];\n if (ev === SpeechStream.FLUSH_SENTINEL) {\n frames = audioStream.flush();\n } else {\n const frame = ev as AudioFrame;\n frames = audioStream.write(new Int16Array(frame.data).buffer);\n }\n\n for (const frame of frames) {\n this.speechDuration += frame.samplesPerChannel / frame.sampleRate;\n const base64 = Buffer.from(frame.data.buffer).toString('base64');\n const msg = { type: 'input_audio', audio: base64 };\n socket.send(JSON.stringify(msg));\n }\n }\n\n closing = true;\n socket.send(JSON.stringify({ type: 'session.finalize' }));\n } catch (e) {\n if ((e as Error).message === 'Send aborted') {\n // Expected abort, don't log\n return;\n }\n throw e;\n }\n };\n\n const recv = async (signal: AbortSignal) => {\n const serverEventStream = eventChannel.stream();\n const reader = serverEventStream.getReader();\n\n try {\n while (!this.closed && !signal.aborted) {\n const result = await reader.read();\n if (signal.aborted) return;\n if (result.done) return;\n\n // Parse and validate with Zod schema\n const parseResult = await sttServerEventSchema.safeParseAsync(result.value);\n if (!parseResult.success) {\n this.#logger.warn(\n { error: parseResult.error, rawData: result.value },\n 'Failed to parse STT server event',\n );\n continue;\n }\n\n const event: SttServerEvent = parseResult.data;\n\n switch (event.type) {\n case 'session.created':\n case 'session.finalized':\n break;\n case 'session.closed':\n finalReceived = true;\n resourceCleanup();\n break;\n case 'interim_transcript':\n this.processTranscript(event, false);\n break;\n case 'final_transcript':\n this.processTranscript(event, true);\n break;\n case 'error':\n this.#logger.error({ error: event }, 'Received error from LiveKit STT');\n resourceCleanup();\n throw new APIError(`LiveKit STT returned error: ${JSON.stringify(event)}`);\n }\n }\n } finally {\n reader.releaseLock();\n try {\n await serverEventStream.cancel();\n } catch (e) {\n this.#logger.debug('Error cancelling serverEventStream (may already be cancelled):', e);\n }\n }\n };\n\n try {\n ws = await this.stt.connectWs(this.connOptions.timeoutMs);\n\n const controller = this.abortController; // Use base class abortController for proper cancellation\n const sendTask = Task.from(({ signal }) => send(ws!, signal), controller);\n const wsListenerTask = Task.from(({ signal }) => createWsListener(ws!, signal), controller);\n const recvTask = Task.from(({ signal }) => recv(signal), controller);\n const waitReconnectTask = Task.from(\n ({ signal }) => Promise.race([this.reconnectEvent.wait(), waitForAbort(signal)]),\n controller,\n );\n\n try {\n await Promise.race([\n Promise.all([sendTask.result, wsListenerTask.result, recvTask.result]),\n waitReconnectTask.result,\n ]);\n\n // If reconnect didn't trigger, tasks finished - exit loop\n if (!waitReconnectTask.done) break;\n\n // Reconnect triggered - clear event and continue loop\n this.reconnectEvent.clear();\n } finally {\n // Cancel all tasks to ensure cleanup\n await cancelAndWait(\n [sendTask, 
wsListenerTask, recvTask, waitReconnectTask],\n DEFAULT_CANCEL_TIMEOUT,\n );\n resourceCleanup();\n }\n } finally {\n // Ensure cleanup even if connectWs throws\n resourceCleanup();\n }\n }\n }\n\n private processTranscript(data: SttTranscriptEvent, isFinal: boolean) {\n // Check if queue is closed to avoid race condition during disconnect\n if (this.queue.closed) return;\n\n const requestId = data.session_id || this.requestId;\n const text = data.transcript;\n const language = data.language || this.opts.language || 'en';\n\n if (!text && !isFinal) return;\n\n try {\n // We'll have a more accurate way of detecting when speech started when we have VAD\n if (!this.speaking) {\n this.speaking = true;\n this.queue.put({ type: SpeechEventType.START_OF_SPEECH });\n }\n\n const speechData: SpeechData = {\n language,\n startTime: this.startTimeOffset + data.start,\n endTime: this.startTimeOffset + data.start + data.duration,\n confidence: data.confidence,\n text,\n words: data.words.map(\n (word): TimedString => ({\n text: word.word,\n startTime: word.start + this.startTimeOffset,\n endTime: word.end + this.startTimeOffset,\n startTimeOffset: this.startTimeOffset,\n confidence: word.confidence,\n }),\n ),\n };\n\n if (isFinal) {\n if (this.speechDuration > 0) {\n this.queue.put({\n type: SpeechEventType.RECOGNITION_USAGE,\n requestId,\n recognitionUsage: { audioDuration: this.speechDuration },\n });\n this.speechDuration = 0;\n }\n\n this.queue.put({\n type: SpeechEventType.FINAL_TRANSCRIPT,\n requestId,\n alternatives: [speechData],\n });\n\n if (this.speaking) {\n this.speaking = false;\n this.queue.put({ type: SpeechEventType.END_OF_SPEECH });\n }\n } else {\n this.queue.put({\n type: SpeechEventType.INTERIM_TRANSCRIPT,\n requestId,\n alternatives: [speechData],\n });\n }\n } catch (e) {\n if (e instanceof Error && e.message.includes('Queue is closed')) {\n // Expected behavior on disconnect, log as warning\n this.#logger.warn(\n { err: e },\n 'Queue closed during transcript processing (expected during disconnect)',\n );\n } else {\n this.#logger.error({ err: e }, 'Error putting transcript to queue');\n }\n }\n 
}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAGA,sBAAgC;AAEhC,wBAAyC;AACzC,mBAAgC;AAChC,iBAAoB;AACpB,4BAAoC;AACpC,iBAMO;AACP,mBAAoE;AACpE,mBAAsF;AAEtF,wBAIO;AACP,IAAAA,gBAA6D;AAyE7D,MAAM,mBAAgC;AACtC,MAAM,sBAAsB;AAC5B,MAAM,mBAAmB;AACzB,MAAM,yBAAyB;AAgBxB,MAAM,YAAsC,WAAAC,IAAQ;AAAA,EACjD;AAAA,EACA,UAAqC,oBAAI,IAAI;AAAA,EAErD,cAAU,gBAAI;AAAA,EAEd,YAAY,MAST;AACD,UAAM,EAAE,WAAW,MAAM,gBAAgB,MAAM,mBAAmB,OAAO,CAAC;AAE1E,UAAM;AAAA,MACJ;AAAA,MACA;AAAA,MACA;AAAA,MACA,WAAW;AAAA,MACX,aAAa;AAAA,MACb;AAAA,MACA;AAAA,MACA,eAAe,CAAC;AAAA,IAClB,IAAI,QAAQ,CAAC;AAEb,UAAM,YAAY,WAAW,QAAQ,IAAI,yBAAyB;AAClE,UAAM,WAAW,UAAU,QAAQ,IAAI,6BAA6B,QAAQ,IAAI;AAChF,QAAI,CAAC,UAAU;AACb,YAAM,IAAI,MAAM,wDAAwD;AAAA,IAC1E;AAEA,UAAM,cACJ,aAAa,QAAQ,IAAI,gCAAgC,QAAQ,IAAI;AACvE,QAAI,CAAC,aAAa;AAChB,YAAM,IAAI,MAAM,iEAAiE;AAAA,IACnF;AAEA,SAAK,OAAO;AAAA,MACV;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,SAAS;AAAA,MACT,QAAQ;AAAA,MACR,WAAW;AAAA,MACX;AAAA,IACF;AAAA,EACF;AAAA,EAEA,IAAI,QAAgB;AAClB,WAAO;AAAA,EACT;AAAA,EAEA,OAAO,gBAAgB,aAAqC;AAC1D,QAAI,YAAY,SAAS,GAAG,GAAG;AAC7B,YAAM,CAAC,OAAO,QAAQ,IAAI,YAAY,MAAM,GAAG;AAC/C,aAAO,IAAI,IAAI,EAAE,OAAO,SAAS,CAAC;AAAA,IACpC;AACA,WAAO,IAAI,IAAI,EAAE,OAAO,YAAY,CAAC;AAAA,EACvC;AAAA,EAEA,MAAgB,WAAW,GAAsC;AAC/D,UAAM,IAAI,MAAM,sEAAsE;AAAA,EACxF;AAAA,EAEA,cAAc,MAA8E;AAC1F,SAAK,OAAO,EAAE,GAAG,KAAK,MAAM,GAAG,KAAK;AAEpC,eAAW,UAAU,KAAK,SAAS;AACjC,aAAO,cAAc,IAAI;AAAA,IAC3B;AAAA,EACF;AAAA,EAEA,OAAO,SAGkB;AACvB,UAAM,EAAE,UAAU,cAAc,yCAA4B,IAAI,WAAW,CAAC;AAC5E,UAAM,aAAa;AAAA,MACjB,GAAG,KAAK;AAAA,MACR,UAAU,YAAY,KAAK,KAAK;AAAA,IAClC;AAEA,UAAM,SAAS,IAAI,aAAa,MAAM,YAAY,WAAW;AAC7D,SAAK,QAAQ,IAAI,MAAM;AAEvB,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,UAAU,SAAqC;AACnD,UAAM,SAAS;AAAA,MACb,UAAU;AAAA,QACR,aAAa,OAAO,KAAK,KAAK,UAAU;AAAA,QACxC,UAAU,KAAK,KAAK;AAAA,QACpB,OAAO,KAAK,KAAK;AAAA,MACnB;AAAA,IACF;AAEA,QAAI,KAAK,KAAK,SAAS,KAAK,KAAK,UAAU,QAAQ;AACjD,aAAO,QAAQ,KAAK,KAAK;AAAA,IAC3B;AAEA,QAAI,KAAK,KAAK,UAAU;AACtB,MAAC,OAAO,SAAqC,WAAW,KAAK,KAAK;AAAA,IACpE;AAEA,QAAI,UAAU,KAAK,KAAK;AACxB,QAAI,QAAQ,WAAW,SAAS,KAAK,QAAQ,WAAW,UAAU,GAAG;AACnE,gBAAU,QAAQ,QAAQ,QAAQ,IAAI;AAAA,IACxC;AAEA,UAAM,QAAQ,UAAM,iCAAkB,KAAK,KAAK,QAAQ,KAAK,KAAK,SAAS;AAC3E,UAAM,MAAM,GAAG,OAAO;AACtB,UAAM,UAAU,EAAE,eAAe,UAAU,KAAK,GAAG;AAEnD,UAAM,SAAS,UAAM,yBAAU,KAAK,SAAS,OAAO;AACpD,UAAM,MAAM,EAAE,GAAG,QAAQ,MAAM,iBAAiB;AAChD,WAAO,KAAK,KAAK,UAAU,GAAG,CAAC;AAE/B,WAAO;AAAA,EACT;AACF;AAEO,MAAM,qBAA+C,WAAAC,aAAiB;AAAA,EACnE;AAAA,EACA,gBAAY,wBAAU,cAAc;AAAA,EACpC,WAAW;AAAA,EACX,iBAAiB;AAAA,EACjB,iBAAiB,IAAI,mBAAM;AAAA,EAC3B;AAAA,EACA;AAAA,EAER,cAAU,gBAAI;AAAA,EAEd,YACE,SACA,MACA,aACA;AACA,UAAM,SAAS,KAAK,YAAY,WAAW;AAC3C,SAAK,OAAO;AACZ,SAAK,MAAM;AACX,SAAK,cAAc;AAAA,EACrB;AAAA,EAEA,IAAI,QAAgB;AAClB,WAAO;AAAA,EACT;AAAA,EAEA,cAAc,MAA8E;AAC1F,SAAK,OAAO,EAAE,GAAG,KAAK,MAAM,GAAG,KAAK;AACpC,SAAK,eAAe,IAAI;AAAA,EAC1B;AAAA,EAEA,MAAgB,MAAqB;AACnC,WAAO,MAAM;AAEX,UAAI,KAAuB;AAC3B,UAAI,UAAU;AACd,UAAI,gBAAgB;AAEpB,YAAM,mBAAe,2CAAoC;AAEzD,YAAM,kBAAkB,MAAM;AAC5B,YAAI,QAAS;AACb,kBAAU;AACV,qBAAa,MAAM;AACnB,iCAAI;AACJ,iCAAI;AAAA,MACN;AAEA,YAAM,mBAAmB,OAAOC,KAAe,WAAwB;AACrE,eAAO,IAAI,QAAc,CAAC,SAAS,WAAW;AAC5C,gBAAM,UAAU,MAAM;AACpB,4BAAgB;AAChB,mBAAO,IAAI,MAAM,8BAA8B,CAAC;AAAA,UAClD;AAEA,iBAAO,iBAAiB,SAAS,SAAS,EAAE,MAAM,KAAK,CAAC;AAExD,UAAAA,IAAG,GAAG,WAAW,CAAC,SAAS;AACzB,kBAAM,OAAO,KAAK,MAAM,KAAK,SAAS,CAAC;AACvC,yBAAa,MAAM,IAAI;AAAA,UACzB,CAAC;AAED,UAAAA,IAAG,GAAG,SAAS,CAAC,MAAM;AACpB,iBAAK,QAAQ,MAAM,EAAE,OAAO,EAAE,GAAG,iBAAiB;AAClD,4BAAgB;AAChB,mBAAO,CAAC;AAAA,UACV,CAAC;AAED,UAAAA,IAAG,GAAG,SAAS,CAAC,SAAiB;AAC/B,4BAAgB;AAEhB,gBAAI,CAAC,QAAS,QAAO,KAAK,QAAQ,MAAM,+BAA+B;AACvE,gBAAI,cAAe,QAAO,Q
AAQ;AAElC;AAAA,cACE,IAAI,iCAAe;AAAA,gBACjB,SAAS;AAAA,gBACT,SAAS,EAAE,YAAY,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAAA,UACF,CAAC;AAAA,QACH,CAAC;AAAA,MACH;AAEA,YAAM,OAAO,OAAO,QAAmB,WAAwB;AAC7D,cAAM,cAAc,IAAI;AAAA,UACtB,KAAK,KAAK;AAAA,UACV;AAAA,UACA,KAAK,MAAM,KAAK,KAAK,aAAa,EAAE;AAAA;AAAA,QACtC;AAGA,cAAM,eAAe,IAAI,QAAe,CAAC,GAAG,WAAW;AACrD,cAAI,OAAO,SAAS;AAClB,mBAAO,OAAO,IAAI,MAAM,cAAc,CAAC;AAAA,UACzC;AACA,gBAAM,UAAU,MAAM,OAAO,IAAI,MAAM,cAAc,CAAC;AACtD,iBAAO,iBAAiB,SAAS,SAAS,EAAE,MAAM,KAAK,CAAC;AAAA,QAC1D,CAAC;AAGD,cAAM,WAAW,KAAK,MAAM,OAAO,aAAa,EAAE;AAClD,YAAI;AACF,iBAAO,MAAM;AACX,kBAAM,SAAS,MAAM,QAAQ,KAAK,CAAC,SAAS,KAAK,GAAG,YAAY,CAAC;AAEjE,gBAAI,OAAO,KAAM;AACjB,kBAAM,KAAK,OAAO;AAElB,gBAAI;AACJ,gBAAI,OAAO,aAAa,gBAAgB;AACtC,uBAAS,YAAY,MAAM;AAAA,YAC7B,OAAO;AACL,oBAAM,QAAQ;AACd,uBAAS,YAAY,MAAM,IAAI,WAAW,MAAM,IAAI,EAAE,MAAM;AAAA,YAC9D;AAEA,uBAAW,SAAS,QAAQ;AAC1B,mBAAK,kBAAkB,MAAM,oBAAoB,MAAM;AACvD,oBAAM,SAAS,OAAO,KAAK,MAAM,KAAK,MAAM,EAAE,SAAS,QAAQ;AAC/D,oBAAM,MAAM,EAAE,MAAM,eAAe,OAAO,OAAO;AACjD,qBAAO,KAAK,KAAK,UAAU,GAAG,CAAC;AAAA,YACjC;AAAA,UACF;AAEA,oBAAU;AACV,iBAAO,KAAK,KAAK,UAAU,EAAE,MAAM,mBAAmB,CAAC,CAAC;AAAA,QAC1D,SAAS,GAAG;AACV,cAAK,EAAY,YAAY,gBAAgB;AAE3C;AAAA,UACF;AACA,gBAAM;AAAA,QACR;AAAA,MACF;AAEA,YAAM,OAAO,OAAO,WAAwB;AAC1C,cAAM,oBAAoB,aAAa,OAAO;AAC9C,cAAM,SAAS,kBAAkB,UAAU;AAE3C,YAAI;AACF,iBAAO,CAAC,KAAK,UAAU,CAAC,OAAO,SAAS;AACtC,kBAAM,SAAS,MAAM,OAAO,KAAK;AACjC,gBAAI,OAAO,QAAS;AACpB,gBAAI,OAAO,KAAM;AAGjB,kBAAM,cAAc,MAAM,uCAAqB,eAAe,OAAO,KAAK;AAC1E,gBAAI,CAAC,YAAY,SAAS;AACxB,mBAAK,QAAQ;AAAA,gBACX,EAAE,OAAO,YAAY,OAAO,SAAS,OAAO,MAAM;AAAA,gBAClD;AAAA,cACF;AACA;AAAA,YACF;AAEA,kBAAM,QAAwB,YAAY;AAE1C,oBAAQ,MAAM,MAAM;AAAA,cAClB,KAAK;AAAA,cACL,KAAK;AACH;AAAA,cACF,KAAK;AACH,gCAAgB;AAChB,gCAAgB;AAChB;AAAA,cACF,KAAK;AACH,qBAAK,kBAAkB,OAAO,KAAK;AACnC;AAAA,cACF,KAAK;AACH,qBAAK,kBAAkB,OAAO,IAAI;AAClC;AAAA,cACF,KAAK;AACH,qBAAK,QAAQ,MAAM,EAAE,OAAO,MAAM,GAAG,iCAAiC;AACtE,gCAAgB;AAChB,sBAAM,IAAI,2BAAS,+BAA+B,KAAK,UAAU,KAAK,CAAC,EAAE;AAAA,YAC7E;AAAA,UACF;AAAA,QACF,UAAE;AACA,iBAAO,YAAY;AACnB,cAAI;AACF,kBAAM,kBAAkB,OAAO;AAAA,UACjC,SAAS,GAAG;AACV,iBAAK,QAAQ,MAAM,kEAAkE,CAAC;AAAA,UACxF;AAAA,QACF;AAAA,MACF;AAEA,UAAI;AACF,aAAK,MAAM,KAAK,IAAI,UAAU,KAAK,YAAY,SAAS;AAExD,cAAM,aAAa,KAAK;AACxB,cAAM,WAAW,kBAAK,KAAK,CAAC,EAAE,OAAO,MAAM,KAAK,IAAK,MAAM,GAAG,UAAU;AACxE,cAAM,iBAAiB,kBAAK,KAAK,CAAC,EAAE,OAAO,MAAM,iBAAiB,IAAK,MAAM,GAAG,UAAU;AAC1F,cAAM,WAAW,kBAAK,KAAK,CAAC,EAAE,OAAO,MAAM,KAAK,MAAM,GAAG,UAAU;AACnE,cAAM,oBAAoB,kBAAK;AAAA,UAC7B,CAAC,EAAE,OAAO,MAAM,QAAQ,KAAK,CAAC,KAAK,eAAe,KAAK,OAAG,2BAAa,MAAM,CAAC,CAAC;AAAA,UAC/E;AAAA,QACF;AAEA,YAAI;AACF,gBAAM,QAAQ,KAAK;AAAA,YACjB,QAAQ,IAAI,CAAC,SAAS,QAAQ,eAAe,QAAQ,SAAS,MAAM,CAAC;AAAA,YACrE,kBAAkB;AAAA,UACpB,CAAC;AAGD,cAAI,CAAC,kBAAkB,KAAM;AAG7B,eAAK,eAAe,MAAM;AAAA,QAC5B,UAAE;AAEA,oBAAM;AAAA,YACJ,CAAC,UAAU,gBAAgB,UAAU,iBAAiB;AAAA,YACtD;AAAA,UACF;AACA,0BAAgB;AAAA,QAClB;AAAA,MACF,UAAE;AAEA,wBAAgB;AAAA,MAClB;AAAA,IACF;AAAA,EACF;AAAA,EAEQ,kBAAkB,MAA0B,SAAkB;AAEpE,QAAI,KAAK,MAAM,OAAQ;AAEvB,UAAM,YAAY,KAAK,cAAc,KAAK;AAC1C,UAAM,OAAO,KAAK;AAClB,UAAM,WAAW,KAAK,YAAY,KAAK,KAAK,YAAY;AAExD,QAAI,CAAC,QAAQ,CAAC,QAAS;AAEvB,QAAI;AAEF,UAAI,CAAC,KAAK,UAAU;AAClB,aAAK,WAAW;AAChB,aAAK,MAAM,IAAI,EAAE,MAAM,2BAAgB,gBAAgB,CAAC;AAAA,MAC1D;AAEA,YAAM,aAAyB;AAAA,QAC7B;AAAA,QACA,WAAW,KAAK,kBAAkB,KAAK;AAAA,QACvC,SAAS,KAAK,kBAAkB,KAAK,QAAQ,KAAK;AAAA,QAClD,YAAY,KAAK;AAAA,QACjB;AAAA,QACA,OAAO,KAAK,MAAM;AAAA,UAChB,CAAC,UAAuB;AAAA,YACtB,MAAM,KAAK;AAAA,YACX,WAAW,KAAK,QAAQ,KAAK;AAAA,YAC7B,SAAS,KAAK,MAAM,KAAK;AAAA,YACzB,iBAAiB,KAAK;AAAA,YACtB,YAAY,KAAK;AAAA,UACnB;AAAA,QACF;AAAA,MACF;AAEA,UAAI,SAAS;AACX,YAAI,KAAK,iBAAiB,GAAG;AAC3B
,eAAK,MAAM,IAAI;AAAA,YACb,MAAM,2BAAgB;AAAA,YACtB;AAAA,YACA,kBAAkB,EAAE,eAAe,KAAK,eAAe;AAAA,UACzD,CAAC;AACD,eAAK,iBAAiB;AAAA,QACxB;AAEA,aAAK,MAAM,IAAI;AAAA,UACb,MAAM,2BAAgB;AAAA,UACtB;AAAA,UACA,cAAc,CAAC,UAAU;AAAA,QAC3B,CAAC;AAED,YAAI,KAAK,UAAU;AACjB,eAAK,WAAW;AAChB,eAAK,MAAM,IAAI,EAAE,MAAM,2BAAgB,cAAc,CAAC;AAAA,QACxD;AAAA,MACF,OAAO;AACL,aAAK,MAAM,IAAI;AAAA,UACb,MAAM,2BAAgB;AAAA,UACtB;AAAA,UACA,cAAc,CAAC,UAAU;AAAA,QAC3B,CAAC;AAAA,MACH;AAAA,IACF,SAAS,GAAG;AACV,UAAI,aAAa,SAAS,EAAE,QAAQ,SAAS,iBAAiB,GAAG;AAE/D,aAAK,QAAQ;AAAA,UACX,EAAE,KAAK,EAAE;AAAA,UACT;AAAA,QACF;AAAA,MACF,OAAO;AACL,aAAK,QAAQ,MAAM,EAAE,KAAK,EAAE,GAAG,mCAAmC;AAAA,MACpE;AAAA,IACF;AAAA,EACF;AACF;","names":["import_utils","BaseSTT","BaseSpeechStream","ws"]}

package/dist/inference/stt.d.cts
CHANGED

@@ -3,9 +3,10 @@ import { STT as BaseSTT, SpeechStream as BaseSpeechStream, type SpeechEvent } fr
 import { type APIConnectOptions } from '../types.js';
 import { type AudioBuffer } from '../utils.js';
 import { type AnyString } from './utils.js';
-export type DeepgramModels = 'deepgram
-export type CartesiaModels = 'cartesia
-export type AssemblyaiModels = 'assemblyai' | 'assemblyai/universal-streaming';
+export type DeepgramModels = 'deepgram/flux-general' | 'deepgram/nova-3' | 'deepgram/nova-3-medical' | 'deepgram/nova-2' | 'deepgram/nova-2-medical' | 'deepgram/nova-2-conversationalai' | 'deepgram/nova-2-phonecall';
+export type CartesiaModels = 'cartesia/ink-whisper';
+export type AssemblyaiModels = 'assemblyai/universal-streaming' | 'assemblyai/universal-streaming-multilingual';
+export type ElevenlabsSTTModels = 'elevenlabs/scribe_v2_realtime';
 export interface CartesiaOptions {
 min_volume?: number;
 max_silence_duration_secs?: number;
@@ -30,7 +31,7 @@ export interface AssemblyAIOptions {
 keyterms_prompt?: string[];
 }
 export type STTLanguages = 'multi' | 'en' | 'de' | 'es' | 'fr' | 'ja' | 'pt' | 'zh' | 'hi' | AnyString;
-type _STTModels = DeepgramModels | CartesiaModels | AssemblyaiModels;
+type _STTModels = DeepgramModels | CartesiaModels | AssemblyaiModels | ElevenlabsSTTModels;
 export type STTModels = _STTModels | 'auto' | AnyString;
 export type ModelWithLanguage = `${_STTModels}:${STTLanguages}` | STTModels;
 export type STTOptions<TModel extends STTModels> = TModel extends DeepgramModels ? DeepgramOptions : TModel extends CartesiaModels ? CartesiaOptions : TModel extends AssemblyaiModels ? AssemblyAIOptions : Record<string, unknown>;
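
For context, the hunk above only widens the STT model unions. A rough usage sketch follows; the root import path and export name are assumptions inferred from these declarations, not confirmed by this diff:

// Hypothetical import; the diff does not show how the inference STT class is
// exported, so adjust the path/name to the package's actual public API.
import { STT } from '@livekit/agents';

// 'elevenlabs/scribe_v2_realtime' is the ElevenlabsSTTModels member added in 1.0.39;
// 'language' takes an STTLanguages value, and per the embedded stt.ts source the
// credentials fall back to the LIVEKIT_API_KEY / LIVEKIT_API_SECRET env variables.
const stt = new STT({
  model: 'elevenlabs/scribe_v2_realtime',
  language: 'en',
});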

package/dist/inference/stt.d.ts
CHANGED

@@ -3,9 +3,10 @@ import { STT as BaseSTT, SpeechStream as BaseSpeechStream, type SpeechEvent } fr
 import { type APIConnectOptions } from '../types.js';
 import { type AudioBuffer } from '../utils.js';
 import { type AnyString } from './utils.js';
-export type DeepgramModels = 'deepgram
-export type CartesiaModels = 'cartesia
-export type AssemblyaiModels = 'assemblyai' | 'assemblyai/universal-streaming';
+export type DeepgramModels = 'deepgram/flux-general' | 'deepgram/nova-3' | 'deepgram/nova-3-medical' | 'deepgram/nova-2' | 'deepgram/nova-2-medical' | 'deepgram/nova-2-conversationalai' | 'deepgram/nova-2-phonecall';
+export type CartesiaModels = 'cartesia/ink-whisper';
+export type AssemblyaiModels = 'assemblyai/universal-streaming' | 'assemblyai/universal-streaming-multilingual';
+export type ElevenlabsSTTModels = 'elevenlabs/scribe_v2_realtime';
 export interface CartesiaOptions {
 min_volume?: number;
 max_silence_duration_secs?: number;
@@ -30,7 +31,7 @@ export interface AssemblyAIOptions {
 keyterms_prompt?: string[];
 }
 export type STTLanguages = 'multi' | 'en' | 'de' | 'es' | 'fr' | 'ja' | 'pt' | 'zh' | 'hi' | AnyString;
-type _STTModels = DeepgramModels | CartesiaModels | AssemblyaiModels;
+type _STTModels = DeepgramModels | CartesiaModels | AssemblyaiModels | ElevenlabsSTTModels;
 export type STTModels = _STTModels | 'auto' | AnyString;
 export type ModelWithLanguage = `${_STTModels}:${STTLanguages}` | STTModels;
 export type STTOptions<TModel extends STTModels> = TModel extends DeepgramModels ? DeepgramOptions : TModel extends CartesiaModels ? CartesiaOptions : TModel extends AssemblyaiModels ? AssemblyAIOptions : Record<string, unknown>;
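
The stt.d.ts hunk mirrors the .d.cts change. The stt.ts source embedded in the stt.cjs.map earlier in this diff shows STT.fromModelString splitting a 'model:language' string, which is what the ModelWithLanguage template type describes. A sketch under the same (assumed) import path:

// Hypothetical import path; fromModelString and the ':' split come from the
// stt.ts source embedded in the source map above.
import { STT } from '@livekit/agents';

// 'deepgram/flux-general:en' resolves to model 'deepgram/flux-general' (one member
// of the new DeepgramModels union) with language 'en'.
const stt = STT.fromModelString('deepgram/flux-general:en');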

package/dist/inference/stt.d.ts.map
CHANGED

@@ -1 +1 @@
-
{"version":3,"file":"stt.d.ts","sourceRoot":"","sources":["../../src/inference/stt.ts"],"names":[],"mappings":"AAIA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,IAAI,CAAC;AAKpC,OAAO,EACL,GAAG,IAAI,OAAO,EACd,YAAY,IAAI,gBAAgB,EAEhC,KAAK,WAAW,EAEjB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,KAAK,iBAAiB,EAA+B,MAAM,aAAa,CAAC;AAClF,OAAO,EAAE,KAAK,WAAW,EAAuD,MAAM,aAAa,CAAC;AAOpG,OAAO,EAAE,KAAK,SAAS,EAAgC,MAAM,YAAY,CAAC;AAE1E,MAAM,MAAM,cAAc,GACtB,
+
{"version":3,"file":"stt.d.ts","sourceRoot":"","sources":["../../src/inference/stt.ts"],"names":[],"mappings":"AAIA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,IAAI,CAAC;AAKpC,OAAO,EACL,GAAG,IAAI,OAAO,EACd,YAAY,IAAI,gBAAgB,EAEhC,KAAK,WAAW,EAEjB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,KAAK,iBAAiB,EAA+B,MAAM,aAAa,CAAC;AAClF,OAAO,EAAE,KAAK,WAAW,EAAuD,MAAM,aAAa,CAAC;AAOpG,OAAO,EAAE,KAAK,SAAS,EAAgC,MAAM,YAAY,CAAC;AAE1E,MAAM,MAAM,cAAc,GACtB,uBAAuB,GACvB,iBAAiB,GACjB,yBAAyB,GACzB,iBAAiB,GACjB,yBAAyB,GACzB,kCAAkC,GAClC,2BAA2B,CAAC;AAEhC,MAAM,MAAM,cAAc,GAAG,sBAAsB,CAAC;AAEpD,MAAM,MAAM,gBAAgB,GACxB,gCAAgC,GAChC,6CAA6C,CAAC;AAElD,MAAM,MAAM,mBAAmB,GAAG,+BAA+B,CAAC;AAElE,MAAM,WAAW,eAAe;IAC9B,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,yBAAyB,CAAC,EAAE,MAAM,CAAC;CACpC;AAED,MAAM,WAAW,eAAe;IAC9B,YAAY,CAAC,EAAE,OAAO,CAAC;IACvB,eAAe,CAAC,EAAE,OAAO,CAAC;IAC1B,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,SAAS,CAAC,EAAE,OAAO,CAAC;IACpB,YAAY,CAAC,EAAE,OAAO,CAAC;IACvB,QAAQ,CAAC,EAAE,KAAK,CAAC,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC,CAAC;IACnC,QAAQ,CAAC,EAAE,MAAM,EAAE,CAAC;IACpB,gBAAgB,CAAC,EAAE,OAAO,CAAC;IAC3B,QAAQ,CAAC,EAAE,OAAO,CAAC;IACnB,WAAW,CAAC,EAAE,OAAO,CAAC;CACvB;AAED,MAAM,WAAW,iBAAiB;IAChC,YAAY,CAAC,EAAE,OAAO,CAAC;IACvB,gCAAgC,CAAC,EAAE,MAAM,CAAC;IAC1C,sCAAsC,CAAC,EAAE,MAAM,CAAC;IAChD,gBAAgB,CAAC,EAAE,MAAM,CAAC;IAC1B,eAAe,CAAC,EAAE,MAAM,EAAE,CAAC;CAC5B;AAED,MAAM,MAAM,YAAY,GACpB,OAAO,GACP,IAAI,GACJ,IAAI,GACJ,IAAI,GACJ,IAAI,GACJ,IAAI,GACJ,IAAI,GACJ,IAAI,GACJ,IAAI,GACJ,SAAS,CAAC;AAEd,KAAK,UAAU,GAAG,cAAc,GAAG,cAAc,GAAG,gBAAgB,GAAG,mBAAmB,CAAC;AAE3F,MAAM,MAAM,SAAS,GAAG,UAAU,GAAG,MAAM,GAAG,SAAS,CAAC;AAExD,MAAM,MAAM,iBAAiB,GAAG,GAAG,UAAU,IAAI,YAAY,EAAE,GAAG,SAAS,CAAC;AAE5E,MAAM,MAAM,UAAU,CAAC,MAAM,SAAS,SAAS,IAAI,MAAM,SAAS,cAAc,GAC5E,eAAe,GACf,MAAM,SAAS,cAAc,GAC3B,eAAe,GACf,MAAM,SAAS,gBAAgB,GAC7B,iBAAiB,GACjB,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;AAEhC,MAAM,MAAM,WAAW,GAAG,WAAW,CAAC;AAOtC,MAAM,WAAW,mBAAmB,CAAC,MAAM,SAAS,SAAS;IAC3D,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,QAAQ,CAAC,EAAE,YAAY,CAAC;IACxB,QAAQ,EAAE,WAAW,CAAC;IACtB,UAAU,EAAE,MAAM,CAAC;IACnB,OAAO,EAAE,MAAM,CAAC;IAChB,MAAM,EAAE,MAAM,CAAC;IACf,SAAS,EAAE,MAAM,CAAC;IAClB,YAAY,EAAE,UAAU,CAAC,MAAM,CAAC,CAAC;CAClC;AAED;;GAEG;AACH,qBAAa,GAAG,CAAC,MAAM,SAAS,SAAS,CAAE,SAAQ,OAAO;;IACxD,OAAO,CAAC,IAAI,CAA8B;IAC1C,OAAO,CAAC,OAAO,CAAwC;gBAI3C,IAAI,CAAC,EAAE;QACjB,KAAK,CAAC,EAAE,MAAM,CAAC;QACf,QAAQ,CAAC,EAAE,YAAY,CAAC;QACxB,OAAO,CAAC,EAAE,MAAM,CAAC;QACjB,QAAQ,CAAC,EAAE,WAAW,CAAC;QACvB,UAAU,CAAC,EAAE,MAAM,CAAC;QACpB,MAAM,CAAC,EAAE,MAAM,CAAC;QAChB,SAAS,CAAC,EAAE,MAAM,CAAC;QACnB,YAAY,CAAC,EAAE,UAAU,CAAC,MAAM,CAAC,CAAC;KACnC;IAsCD,IAAI,KAAK,IAAI,MAAM,CAElB;IAED,MAAM,CAAC,eAAe,CAAC,WAAW,EAAE,MAAM,GAAG,GAAG,CAAC,SAAS,CAAC;cAQ3C,UAAU,CAAC,CAAC,EAAE,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;IAIhE,aAAa,CAAC,IAAI,EAAE,OAAO,CAAC,IAAI,CAAC,mBAAmB,CAAC,MAAM,CAAC,EAAE,OAAO,GAAG,UAAU,CAAC,CAAC,GAAG,IAAI;IAQ3F,MAAM,CAAC,OAAO,CAAC,EAAE;QACf,QAAQ,CAAC,EAAE,YAAY,GAAG,MAAM,CAAC;QACjC,WAAW,CAAC,EAAE,iBAAiB,CAAC;KACjC,GAAG,YAAY,CAAC,MAAM,CAAC;IAalB,SAAS,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO,CAAC,SAAS,CAAC;CAgCrD;AAED,qBAAa,YAAY,CAAC,MAAM,SAAS,SAAS,CAAE,SAAQ,gBAAgB;;IAC1E,OAAO,CAAC,IAAI,CAA8B;IAC1C,OAAO,CAAC,SAAS,CAA6B;IAC9C,OAAO,CAAC,QAAQ,CAAS;IACzB,OAAO,CAAC,cAAc,CAAK;IAC3B,OAAO,CAAC,cAAc,CAAe;IACrC,OAAO,CAAC,GAAG,CAAc;IACzB,OAAO,CAAC,WAAW,CAAoB;gBAKrC,OAAO,EAAE,GAAG,CAAC,MAAM,CAAC,EACpB,IAAI,EAAE,mBAAmB,CAAC,MAAM,CAAC,EACjC,WAAW,EAAE,iBAAiB;IAQhC,IAAI,KAAK,IAAI,MAAM,CAElB;IAED,aAAa,CAAC,IAAI,EAAE,OAAO,CAAC,IAAI,CAAC,mBAAmB,CAAC,MAAM,CAAC,EAAE,OAAO,GAAG,UAAU,CAAC,CAAC,GAAG,IAAI;cAK3E,GAAG,IAAI,OAAO,CAAC,IAAI,CAAC;IAmMpC,OAAO,CAAC,iBAAiB;CAyE1B"}