@livekit/agents-plugin-openai 1.2.1 → 1.2.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +1 -1
- package/dist/index.js +1 -1
- package/dist/llm.cjs.map +1 -1
- package/dist/llm.js.map +1 -1
- package/dist/realtime/realtime_model.cjs +27 -2
- package/dist/realtime/realtime_model.cjs.map +1 -1
- package/dist/realtime/realtime_model.d.cts +1 -0
- package/dist/realtime/realtime_model.d.ts +1 -0
- package/dist/realtime/realtime_model.d.ts.map +1 -1
- package/dist/realtime/realtime_model.js +27 -2
- package/dist/realtime/realtime_model.js.map +1 -1
- package/dist/responses/llm.cjs +10 -0
- package/dist/responses/llm.cjs.map +1 -1
- package/dist/responses/llm.d.ts.map +1 -1
- package/dist/responses/llm.js +10 -0
- package/dist/responses/llm.js.map +1 -1
- package/package.json +5 -5
- package/src/llm.ts +1 -1
- package/src/realtime/realtime_model.ts +37 -2
- package/src/responses/llm.ts +10 -0
package/dist/responses/llm.js.map
CHANGED
@@ -1 +1 @@
(regenerated single-line source map; its embedded sourcesContent and mappings now include the `response.failed` handling added in src/responses/llm.ts; see that file's source diff below)
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@livekit/agents-plugin-openai",
-  "version": "1.2.1",
+  "version": "1.2.3",
   "description": "OpenAI plugin for LiveKit Node Agents",
   "main": "dist/index.js",
   "require": "dist/index.cjs",
@@ -30,9 +30,9 @@
     "@types/ws": "^8.5.10",
     "tsup": "^8.3.5",
     "typescript": "^5.0.0",
-    "@livekit/agents
-    "@livekit/agents": "1.2.
-    "@livekit/agents-plugins-test": "1.2.
+    "@livekit/agents": "1.2.3",
+    "@livekit/agents-plugin-silero": "1.2.3",
+    "@livekit/agents-plugins-test": "1.2.3"
   },
   "dependencies": {
     "@livekit/mutex": "^1.1.1",
@@ -42,7 +42,7 @@
   "peerDependencies": {
     "@livekit/rtc-node": "^0.13.24",
     "zod": "^3.25.76 || ^4.1.8",
-    "@livekit/agents": "1.2.
+    "@livekit/agents": "1.2.3"
   },
   "scripts": {
     "build": "tsup --onSuccess \"pnpm build:types\"",
package/src/llm.ts
CHANGED
@@ -521,7 +521,7 @@ export class LLM extends llm.LLM {
     return new LLMStream(this as unknown as inference.LLM, {
       model: this.#opts.model,
       providerFmt: this.#providerFmt,
-      client: this.#client,
+      client: this.#client as any,
       chatCtx,
       toolCtx,
       connOptions,
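The lone change here loosens the type of the client handed to `LLMStream`. This is presumably a workaround for a nominal type mismatch: when two copies or versions of the `openai` package each declare their own client class, TypeScript rejects the assignment even though the instances are interchangeable at runtime. A minimal sketch of that failure mode, using hypothetical `ClientA`/`ClientB` classes rather than the plugin's actual types:

```ts
// Private #fields make TypeScript treat classes nominally, so two identical
// declarations (e.g. from duplicated copies of a dependency) never unify.
class ClientA {
  #key = 'a';
  request(path: string): Promise<string> {
    return Promise.resolve(`${path}:${this.#key}`);
  }
}

class ClientB {
  #key = 'b';
  request(path: string): Promise<string> {
    return Promise.resolve(`${path}:${this.#key}`);
  }
}

function startStream(client: ClientB): Promise<string> {
  return client.request('/responses');
}

const client = new ClientA();
// startStream(client);               // compile error: separate declarations of '#key'
startStream(client as any).then(console.log); // runtime-compatible, so the cast is safe here
```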
package/src/realtime/realtime_model.ts
CHANGED
@@ -419,6 +419,12 @@ export class RealtimeSession extends llm.RealtimeSession {
     this.sendEvent(this.createSessionUpdateEvent());
   }

+  private hasInflightResponse(): boolean {
+    return (
+      this.currentGeneration !== undefined || Object.keys(this.responseCreatedFutures).length > 0
+    );
+  }
+
   sendEvent(command: api_proto.ClientEvent): void {
     this.messageChannel.put(command);
   }
@@ -624,6 +630,13 @@ export class RealtimeSession extends llm.RealtimeSession {
   }

   updateOptions({ toolChoice }: { toolChoice?: llm.ToolChoice }): void {
+    const currentToolChoice = toOaiToolChoice(this.oaiRealtimeModel._options.toolChoice);
+    const nextToolChoice = toOaiToolChoice(toolChoice);
+    if (currentToolChoice === nextToolChoice) {
+      this.oaiRealtimeModel._options.toolChoice = toolChoice;
+      return;
+    }
+
     const options: api_proto.SessionUpdateEvent['session'] = {
       type: 'realtime',
     };
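This hunk short-circuits `updateOptions` when the tool choice is effectively unchanged: both the stored and incoming values are normalized through `toOaiToolChoice`, and if they agree the incoming value is recorded on the model options without sending a `session.update` to the server. A reduced sketch of the pattern, assuming the normalization maps equivalent spellings (here, `undefined` and `'auto'`) to one canonical form:

```ts
// Normalize-then-compare guard: skip the server round-trip when the effective
// value is unchanged, but still record the caller's exact value locally.
type ToolChoice = 'auto' | 'required' | 'none' | undefined;

// Hypothetical normalization; the real toOaiToolChoice lives in the plugin.
function toOaiToolChoice(choice: ToolChoice): 'auto' | 'required' | 'none' {
  return choice ?? 'auto';
}

let storedToolChoice: ToolChoice;
let sessionUpdatesSent = 0;

function updateOptions(toolChoice: ToolChoice): void {
  if (toOaiToolChoice(storedToolChoice) === toOaiToolChoice(toolChoice)) {
    storedToolChoice = toolChoice; // remember the value, skip the update
    return;
  }
  storedToolChoice = toolChoice;
  sessionUpdatesSent += 1; // stands in for sendEvent({ type: 'session.update', ... })
}

updateOptions('auto');           // normalizes equal to undefined: no event sent
updateOptions('required');       // changed: one session.update goes out
console.log(sessionUpdatesSent); // 1
```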
@@ -677,6 +690,10 @@ export class RealtimeSession extends llm.RealtimeSession {
   }

   async interrupt(): Promise<void> {
+    if (!this.hasInflightResponse()) {
+      return;
+    }
+
     this.sendEvent({
       type: 'response.cancel',
     } as api_proto.ResponseCancelEvent);
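Together with the `hasInflightResponse` helper added earlier in this diff, this makes `interrupt()` a no-op when nothing is being generated: `response.cancel` is only sent while a response is in flight, either actively generating or requested and still awaiting its `response.created` ack. Presumably this avoids the server error the Realtime API returns when a cancel arrives with no active response. A condensed model of the guard (simplified shapes, not the plugin's real types):

```ts
// The doneFut shape stands in for the plugin's future helper; currentGeneration
// is whatever generation is currently streaming.
interface FutureHandle {
  doneFut: { done: boolean };
}

class SessionSketch {
  currentGeneration: object | undefined;
  responseCreatedFutures: Record<string, FutureHandle> = {};
  sentEvents: string[] = [];

  private hasInflightResponse(): boolean {
    return (
      this.currentGeneration !== undefined ||
      Object.keys(this.responseCreatedFutures).length > 0
    );
  }

  interrupt(): void {
    if (!this.hasInflightResponse()) {
      return; // nothing to cancel: don't send response.cancel
    }
    this.sentEvents.push('response.cancel');
  }
}

const session = new SessionSketch();
session.interrupt();                    // no-op: nothing in flight
session.currentGeneration = {};
session.interrupt();                    // sends the cancel
console.log(session.sentEvents.length); // 1
```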
@@ -831,6 +848,13 @@ export class RealtimeSession extends llm.RealtimeSession {
     }
     this.itemDeleteFutures = {};

+    for (const handle of Object.values(this.responseCreatedFutures)) {
+      if (!handle.doneFut.done) {
+        handle.doneFut.reject(new Error('Session reconnected'));
+      }
+    }
+    this.responseCreatedFutures = {};
+
     // Clear audio-capable item tracking - restored items are text-only on the server
     this.audioCapableItemIds.clear();

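On reconnect, a caller still awaiting a `response.created` acknowledgement would otherwise hang forever, since the new session will never ack a request made on the old one; the added loop rejects those pending futures instead. A sketch using a Promise-backed future as a stand-in for the plugin's internal Future type:

```ts
// Promise-backed future with a done flag, standing in for the plugin's helper.
class Future<T = void> {
  done = false;
  resolve!: (value: T) => void;
  reject!: (err: Error) => void;
  readonly promise = new Promise<T>((res, rej) => {
    this.resolve = (v) => { this.done = true; res(v); };
    this.reject = (e) => { this.done = true; rej(e); };
  });
}

let responseCreatedFutures: Record<string, { doneFut: Future }> = {
  resp_1: { doneFut: new Future() },
};

responseCreatedFutures.resp_1!.doneFut.promise.catch((err) =>
  console.log('awaiting caller sees:', err.message),
);

// On session restore: fail pending acks fast rather than leaving them dangling.
for (const handle of Object.values(responseCreatedFutures)) {
  if (!handle.doneFut.done) {
    handle.doneFut.reject(new Error('Session reconnected'));
  }
}
responseCreatedFutures = {};
```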
@@ -1236,13 +1260,24 @@ export class RealtimeSession extends llm.RealtimeSession {
       throw new Error('item.id is not set');
     }

+    const serverEventType = event.type as string;
+    const incomingItem = openAIItemToLivekitItem(event.item);
+    const existingItem = this.remoteChatCtx.get(event.item.id);
+    const pendingCreateFuture = this.itemCreateFutures[event.item.id];
+    if (existingItem && serverEventType === 'conversation.item.added' && !pendingCreateFuture) {
+      // The server may emit a later input-audio-backed view of a user item whose
+      // transcribed text variant we already inserted locally under the same ID.
+      // Treat that as idempotent instead of surfacing a duplicate-ID error.
+      return;
+    }
+
     try {
-      this.remoteChatCtx.insert(event.previous_item_id,
+      this.remoteChatCtx.insert(event.previous_item_id, incomingItem);
     } catch (error) {
       this.#logger.error({ error, itemId: event.item.id }, 'failed to insert conversation item');
     }

-    const fut =
+    const fut = pendingCreateFuture;
     if (fut) {
       fut.resolve();
       delete this.itemCreateFutures[event.item.id];
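The last hunk makes `conversation.item.added` idempotent: when the server re-announces an item ID the mirrored chat context already holds, and no local insert is awaiting an ack, the event is dropped instead of tripping a duplicate-ID insert error. A reduced model of the decision, with a `Map` standing in for the remote chat context:

```ts
// Reduced model: the Map plays RemoteChatContext, the record plays itemCreateFutures.
type Item = { id: string; text: string };

const remoteChatCtx = new Map<string, Item>();
const itemCreateFutures: Record<string, { resolve(): void }> = {};

function onConversationItemAdded(eventType: string, item: Item): 'inserted' | 'skipped' {
  const existingItem = remoteChatCtx.get(item.id);
  const pendingCreateFuture = itemCreateFutures[item.id];

  if (existingItem && eventType === 'conversation.item.added' && !pendingCreateFuture) {
    return 'skipped'; // server re-sent an item we already hold: treat as idempotent
  }

  remoteChatCtx.set(item.id, item); // stands in for remoteChatCtx.insert(...)
  pendingCreateFuture?.resolve();
  delete itemCreateFutures[item.id];
  return 'inserted';
}

console.log(onConversationItemAdded('conversation.item.added', { id: 'item_1', text: 'hi' })); // inserted
console.log(onConversationItemAdded('conversation.item.added', { id: 'item_1', text: 'hi' })); // skipped
```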
package/src/responses/llm.ts
CHANGED
@@ -228,6 +228,9 @@ class ResponsesHttpLLMStream extends llm.LLMStream {
           case 'response.completed':
             chunk = this.handleResponseCompleted(event);
             break;
+          case 'response.failed':
+            this.handleResponseFailed(event);
+            break;
         }

         if (chunk) {
@@ -272,6 +275,13 @@ class ResponsesHttpLLMStream extends llm.LLMStream {
     });
   }

+  private handleResponseFailed(event: OpenAI.Responses.ResponseFailedEvent): void {
+    throw new APIStatusError({
+      message: event.response.error?.message ?? 'Response failed',
+      options: { statusCode: -1, retryable: false },
+    });
+  }
+
   private handleResponseCreated(event: OpenAI.Responses.ResponseCreatedEvent): void {
     this.responseId = event.response.id;
   }
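Previously a terminal `response.failed` event fell through the switch unhandled, so a failed response could end the stream without surfacing an error; the new handler converts it into a non-retryable `APIStatusError` that propagates to whoever is consuming the stream. A sketch of the mapping with minimal stand-in types (the real event type is `OpenAI.Responses.ResponseFailedEvent`):

```ts
// Minimal stand-ins for the plugin's error class and the event shape.
class APIStatusError extends Error {
  constructor(args: { message: string; options: { statusCode: number; retryable: boolean } }) {
    super(args.message);
    this.name = 'APIStatusError';
  }
}

type ResponseFailedEvent = {
  type: 'response.failed';
  response: { error?: { message: string } };
};

function handleResponseFailed(event: ResponseFailedEvent): never {
  throw new APIStatusError({
    message: event.response.error?.message ?? 'Response failed',
    options: { statusCode: -1, retryable: false }, // terminal failure: no retry
  });
}

try {
  handleResponseFailed({
    type: 'response.failed',
    response: { error: { message: 'The model is overloaded' } },
  });
} catch (err) {
  console.log((err as Error).name, '-', (err as Error).message);
  // APIStatusError - The model is overloaded
}
```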