@livekit/agents 1.0.34 → 1.0.36-dev.0

This diff shows the contents of publicly released versions of the package, as they appear in their public registries, and is provided for informational purposes only.
Files changed (187)
  1. package/dist/cli.cjs.map +1 -1
  2. package/dist/index.cjs +3 -1
  3. package/dist/index.cjs.map +1 -1
  4. package/dist/index.d.cts +1 -0
  5. package/dist/index.d.ts +1 -0
  6. package/dist/index.d.ts.map +1 -1
  7. package/dist/index.js +1 -0
  8. package/dist/index.js.map +1 -1
  9. package/dist/inference/api_protos.d.cts +4 -4
  10. package/dist/inference/api_protos.d.ts +4 -4
  11. package/dist/inference/interruption/AdaptiveInterruptionDetector.cjs +152 -0
  12. package/dist/inference/interruption/AdaptiveInterruptionDetector.cjs.map +1 -0
  13. package/dist/inference/interruption/AdaptiveInterruptionDetector.d.cts +50 -0
  14. package/dist/inference/interruption/AdaptiveInterruptionDetector.d.ts +50 -0
  15. package/dist/inference/interruption/AdaptiveInterruptionDetector.d.ts.map +1 -0
  16. package/dist/inference/interruption/AdaptiveInterruptionDetector.js +125 -0
  17. package/dist/inference/interruption/AdaptiveInterruptionDetector.js.map +1 -0
  18. package/dist/inference/interruption/InterruptionStream.cjs +310 -0
  19. package/dist/inference/interruption/InterruptionStream.cjs.map +1 -0
  20. package/dist/inference/interruption/InterruptionStream.d.cts +57 -0
  21. package/dist/inference/interruption/InterruptionStream.d.ts +57 -0
  22. package/dist/inference/interruption/InterruptionStream.d.ts.map +1 -0
  23. package/dist/inference/interruption/InterruptionStream.js +288 -0
  24. package/dist/inference/interruption/InterruptionStream.js.map +1 -0
  25. package/dist/inference/interruption/defaults.cjs +76 -0
  26. package/dist/inference/interruption/defaults.cjs.map +1 -0
  27. package/dist/inference/interruption/defaults.d.cts +14 -0
  28. package/dist/inference/interruption/defaults.d.ts +14 -0
  29. package/dist/inference/interruption/defaults.d.ts.map +1 -0
  30. package/dist/inference/interruption/defaults.js +42 -0
  31. package/dist/inference/interruption/defaults.js.map +1 -0
  32. package/dist/inference/interruption/errors.cjs +2 -0
  33. package/dist/inference/interruption/errors.cjs.map +1 -0
  34. package/dist/inference/interruption/errors.d.cts +2 -0
  35. package/dist/inference/interruption/errors.d.ts +2 -0
  36. package/dist/inference/interruption/errors.d.ts.map +1 -0
  37. package/dist/inference/interruption/errors.js +1 -0
  38. package/dist/inference/interruption/errors.js.map +1 -0
  39. package/dist/inference/interruption/http_transport.cjs +57 -0
  40. package/dist/inference/interruption/http_transport.cjs.map +1 -0
  41. package/dist/inference/interruption/http_transport.d.cts +23 -0
  42. package/dist/inference/interruption/http_transport.d.ts +23 -0
  43. package/dist/inference/interruption/http_transport.d.ts.map +1 -0
  44. package/dist/inference/interruption/http_transport.js +33 -0
  45. package/dist/inference/interruption/http_transport.js.map +1 -0
  46. package/dist/inference/interruption/index.cjs +34 -0
  47. package/dist/inference/interruption/index.cjs.map +1 -0
  48. package/dist/inference/interruption/index.d.cts +5 -0
  49. package/dist/inference/interruption/index.d.ts +5 -0
  50. package/dist/inference/interruption/index.d.ts.map +1 -0
  51. package/dist/inference/interruption/index.js +7 -0
  52. package/dist/inference/interruption/index.js.map +1 -0
  53. package/dist/inference/interruption/interruption.cjs +85 -0
  54. package/dist/inference/interruption/interruption.cjs.map +1 -0
  55. package/dist/inference/interruption/interruption.d.cts +48 -0
  56. package/dist/inference/interruption/interruption.d.ts +48 -0
  57. package/dist/inference/interruption/interruption.d.ts.map +1 -0
  58. package/dist/inference/interruption/interruption.js +59 -0
  59. package/dist/inference/interruption/interruption.js.map +1 -0
  60. package/dist/inference/llm.cjs +30 -3
  61. package/dist/inference/llm.cjs.map +1 -1
  62. package/dist/inference/llm.d.cts +3 -1
  63. package/dist/inference/llm.d.ts +3 -1
  64. package/dist/inference/llm.d.ts.map +1 -1
  65. package/dist/inference/llm.js +30 -3
  66. package/dist/inference/llm.js.map +1 -1
  67. package/dist/inference/utils.cjs +15 -2
  68. package/dist/inference/utils.cjs.map +1 -1
  69. package/dist/inference/utils.d.cts +1 -0
  70. package/dist/inference/utils.d.ts +1 -0
  71. package/dist/inference/utils.d.ts.map +1 -1
  72. package/dist/inference/utils.js +13 -1
  73. package/dist/inference/utils.js.map +1 -1
  74. package/dist/inference/utils.test.cjs +20 -0
  75. package/dist/inference/utils.test.cjs.map +1 -0
  76. package/dist/inference/utils.test.js +19 -0
  77. package/dist/inference/utils.test.js.map +1 -0
  78. package/dist/ipc/inference_proc_executor.cjs.map +1 -1
  79. package/dist/ipc/job_proc_executor.cjs.map +1 -1
  80. package/dist/ipc/job_proc_lazy_main.cjs +1 -1
  81. package/dist/ipc/job_proc_lazy_main.cjs.map +1 -1
  82. package/dist/ipc/job_proc_lazy_main.js +1 -1
  83. package/dist/ipc/job_proc_lazy_main.js.map +1 -1
  84. package/dist/llm/chat_context.cjs +20 -2
  85. package/dist/llm/chat_context.cjs.map +1 -1
  86. package/dist/llm/chat_context.d.cts +9 -0
  87. package/dist/llm/chat_context.d.ts +9 -0
  88. package/dist/llm/chat_context.d.ts.map +1 -1
  89. package/dist/llm/chat_context.js +20 -2
  90. package/dist/llm/chat_context.js.map +1 -1
  91. package/dist/llm/llm.cjs.map +1 -1
  92. package/dist/llm/llm.d.cts +1 -0
  93. package/dist/llm/llm.d.ts +1 -0
  94. package/dist/llm/llm.d.ts.map +1 -1
  95. package/dist/llm/llm.js.map +1 -1
  96. package/dist/llm/provider_format/openai.cjs +43 -20
  97. package/dist/llm/provider_format/openai.cjs.map +1 -1
  98. package/dist/llm/provider_format/openai.d.ts.map +1 -1
  99. package/dist/llm/provider_format/openai.js +43 -20
  100. package/dist/llm/provider_format/openai.js.map +1 -1
  101. package/dist/llm/provider_format/openai.test.cjs +35 -0
  102. package/dist/llm/provider_format/openai.test.cjs.map +1 -1
  103. package/dist/llm/provider_format/openai.test.js +35 -0
  104. package/dist/llm/provider_format/openai.test.js.map +1 -1
  105. package/dist/llm/provider_format/utils.cjs +1 -1
  106. package/dist/llm/provider_format/utils.cjs.map +1 -1
  107. package/dist/llm/provider_format/utils.d.ts.map +1 -1
  108. package/dist/llm/provider_format/utils.js +1 -1
  109. package/dist/llm/provider_format/utils.js.map +1 -1
  110. package/dist/stream/stream_channel.cjs +3 -0
  111. package/dist/stream/stream_channel.cjs.map +1 -1
  112. package/dist/stream/stream_channel.d.cts +3 -2
  113. package/dist/stream/stream_channel.d.ts +3 -2
  114. package/dist/stream/stream_channel.d.ts.map +1 -1
  115. package/dist/stream/stream_channel.js +3 -0
  116. package/dist/stream/stream_channel.js.map +1 -1
  117. package/dist/telemetry/trace_types.cjs +15 -0
  118. package/dist/telemetry/trace_types.cjs.map +1 -1
  119. package/dist/telemetry/trace_types.d.cts +5 -0
  120. package/dist/telemetry/trace_types.d.ts +5 -0
  121. package/dist/telemetry/trace_types.d.ts.map +1 -1
  122. package/dist/telemetry/trace_types.js +10 -0
  123. package/dist/telemetry/trace_types.js.map +1 -1
  124. package/dist/utils/ws_transport.cjs +51 -0
  125. package/dist/utils/ws_transport.cjs.map +1 -0
  126. package/dist/utils/ws_transport.d.cts +9 -0
  127. package/dist/utils/ws_transport.d.ts +9 -0
  128. package/dist/utils/ws_transport.d.ts.map +1 -0
  129. package/dist/utils/ws_transport.js +17 -0
  130. package/dist/utils/ws_transport.js.map +1 -0
  131. package/dist/utils/ws_transport.test.cjs +212 -0
  132. package/dist/utils/ws_transport.test.cjs.map +1 -0
  133. package/dist/utils/ws_transport.test.js +211 -0
  134. package/dist/utils/ws_transport.test.js.map +1 -0
  135. package/dist/voice/agent_activity.cjs +49 -0
  136. package/dist/voice/agent_activity.cjs.map +1 -1
  137. package/dist/voice/agent_activity.d.cts +14 -0
  138. package/dist/voice/agent_activity.d.ts +14 -0
  139. package/dist/voice/agent_activity.d.ts.map +1 -1
  140. package/dist/voice/agent_activity.js +49 -0
  141. package/dist/voice/agent_activity.js.map +1 -1
  142. package/dist/voice/agent_session.cjs +12 -1
  143. package/dist/voice/agent_session.cjs.map +1 -1
  144. package/dist/voice/agent_session.d.cts +3 -0
  145. package/dist/voice/agent_session.d.ts +3 -0
  146. package/dist/voice/agent_session.d.ts.map +1 -1
  147. package/dist/voice/agent_session.js +12 -1
  148. package/dist/voice/agent_session.js.map +1 -1
  149. package/dist/voice/audio_recognition.cjs +124 -2
  150. package/dist/voice/audio_recognition.cjs.map +1 -1
  151. package/dist/voice/audio_recognition.d.cts +32 -1
  152. package/dist/voice/audio_recognition.d.ts +32 -1
  153. package/dist/voice/audio_recognition.d.ts.map +1 -1
  154. package/dist/voice/audio_recognition.js +127 -2
  155. package/dist/voice/audio_recognition.js.map +1 -1
  156. package/dist/voice/background_audio.cjs.map +1 -1
  157. package/dist/voice/generation.cjs +2 -1
  158. package/dist/voice/generation.cjs.map +1 -1
  159. package/dist/voice/generation.d.ts.map +1 -1
  160. package/dist/voice/generation.js +2 -1
  161. package/dist/voice/generation.js.map +1 -1
  162. package/package.json +2 -1
  163. package/src/index.ts +2 -0
  164. package/src/inference/interruption/AdaptiveInterruptionDetector.ts +166 -0
  165. package/src/inference/interruption/InterruptionStream.ts +397 -0
  166. package/src/inference/interruption/defaults.ts +33 -0
  167. package/src/inference/interruption/errors.ts +0 -0
  168. package/src/inference/interruption/http_transport.ts +61 -0
  169. package/src/inference/interruption/index.ts +4 -0
  170. package/src/inference/interruption/interruption.ts +88 -0
  171. package/src/inference/llm.ts +42 -3
  172. package/src/inference/utils.test.ts +31 -0
  173. package/src/inference/utils.ts +15 -0
  174. package/src/ipc/job_proc_lazy_main.ts +1 -1
  175. package/src/llm/chat_context.ts +32 -2
  176. package/src/llm/llm.ts +1 -0
  177. package/src/llm/provider_format/openai.test.ts +40 -0
  178. package/src/llm/provider_format/openai.ts +46 -19
  179. package/src/llm/provider_format/utils.ts +5 -1
  180. package/src/stream/stream_channel.ts +6 -2
  181. package/src/telemetry/trace_types.ts +7 -0
  182. package/src/utils/ws_transport.test.ts +282 -0
  183. package/src/utils/ws_transport.ts +22 -0
  184. package/src/voice/agent_activity.ts +61 -0
  185. package/src/voice/agent_session.ts +22 -2
  186. package/src/voice/audio_recognition.ts +161 -1
  187. package/src/voice/generation.ts +1 -0
package/dist/inference/interruption/http_transport.js
@@ -0,0 +1,33 @@
+ import { ofetch } from "ofetch";
+ async function predictHTTP(data, predictOptions, options) {
+   const createdAt = performance.now();
+   const url = new URL(`/bargein`, options.baseUrl);
+   url.searchParams.append("threshold", predictOptions.threshold.toString());
+   url.searchParams.append("min_frames", predictOptions.minFrames.toFixed());
+   url.searchParams.append("created_at", createdAt.toFixed());
+   const { created_at, is_bargein, probabilities } = await ofetch(
+     url.toString(),
+     {
+       retry: 1,
+       retryDelay: 100,
+       headers: {
+         "Content-Type": "application/octet-stream",
+         Authorization: `Bearer ${options.token}`
+       },
+       signal: options.signal,
+       timeout: options.timeout,
+       method: "POST",
+       body: data
+     }
+   );
+   return {
+     createdAt: created_at,
+     isBargein: is_bargein,
+     probabilities: new Float32Array(probabilities),
+     predictionDuration: (performance.now() - createdAt) / 1e9
+   };
+ }
+ export {
+   predictHTTP
+ };
+ //# sourceMappingURL=http_transport.js.map
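The new transport is a thin wrapper around ofetch: it POSTs raw Int16 PCM to the gateway's /bargein endpoint, passes the detection parameters as query strings, and converts the snake_case response fields to camelCase. A minimal calling sketch; the dist import path, base URL, and token below are assumptions for illustration, not documented entry points (the PostOptions/PredictOptions shapes are visible in the sourcesContent of the map that follows):

```ts
// Sketch only — the import path, baseUrl, and token are placeholders, not public API.
import { predictHTTP } from '@livekit/agents/dist/inference/interruption/http_transport.js';

const pcm = new Int16Array(16000); // e.g. one second of audio at an assumed 16 kHz
const abort = new AbortController();

const res = await predictHTTP(
  pcm,
  { threshold: 0.5, minFrames: 10 }, // PredictOptions
  {
    baseUrl: 'https://example-gateway.invalid', // placeholder PostOptions.baseUrl
    token: 'access-token',                      // placeholder bearer token
    signal: abort.signal,
    timeout: 5_000,
  },
);

console.log(res.isBargein, res.probabilities.length, res.predictionDuration);
```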
package/dist/inference/interruption/http_transport.js.map
@@ -0,0 +1 @@
+ {"version":3,"sources":["../../../src/inference/interruption/http_transport.ts"],"sourcesContent":["import { ofetch } from 'ofetch';\n\nexport interface PostOptions {\n baseUrl: string;\n token: string;\n signal?: AbortSignal;\n timeout?: number;\n}\n\nexport interface PredictOptions {\n threshold: number;\n minFrames: number;\n}\n\nexport interface PredictEndpointResponse {\n created_at: number;\n is_bargein: boolean;\n probabilities: number[];\n}\n\nexport interface PredictResponse {\n createdAt: number;\n isBargein: boolean;\n probabilities: Float32Array;\n predictionDuration: number;\n}\n\nexport async function predictHTTP(\n data: Int16Array,\n predictOptions: PredictOptions,\n options: PostOptions,\n): Promise<PredictResponse> {\n const createdAt = performance.now();\n const url = new URL(`/bargein`, options.baseUrl);\n url.searchParams.append('threshold', predictOptions.threshold.toString());\n url.searchParams.append('min_frames', predictOptions.minFrames.toFixed());\n url.searchParams.append('created_at', createdAt.toFixed());\n\n const { created_at, is_bargein, probabilities } = await ofetch<PredictEndpointResponse>(\n url.toString(),\n {\n retry: 1,\n retryDelay: 100,\n headers: {\n 'Content-Type': 'application/octet-stream',\n Authorization: `Bearer ${options.token}`,\n },\n signal: options.signal,\n timeout: options.timeout,\n method: 'POST',\n body: data,\n },\n );\n\n return {\n createdAt: created_at,\n isBargein: is_bargein,\n probabilities: new Float32Array(probabilities),\n predictionDuration: (performance.now() - createdAt) / 1e9,\n };\n}\n"],"mappings":"AAAA,SAAS,cAAc;AA2BvB,eAAsB,YACpB,MACA,gBACA,SAC0B;AAC1B,QAAM,YAAY,YAAY,IAAI;AAClC,QAAM,MAAM,IAAI,IAAI,YAAY,QAAQ,OAAO;AAC/C,MAAI,aAAa,OAAO,aAAa,eAAe,UAAU,SAAS,CAAC;AACxE,MAAI,aAAa,OAAO,cAAc,eAAe,UAAU,QAAQ,CAAC;AACxE,MAAI,aAAa,OAAO,cAAc,UAAU,QAAQ,CAAC;AAEzD,QAAM,EAAE,YAAY,YAAY,cAAc,IAAI,MAAM;AAAA,IACtD,IAAI,SAAS;AAAA,IACb;AAAA,MACE,OAAO;AAAA,MACP,YAAY;AAAA,MACZ,SAAS;AAAA,QACP,gBAAgB;AAAA,QAChB,eAAe,UAAU,QAAQ,KAAK;AAAA,MACxC;AAAA,MACA,QAAQ,QAAQ;AAAA,MAChB,SAAS,QAAQ;AAAA,MACjB,QAAQ;AAAA,MACR,MAAM;AAAA,IACR;AAAA,EACF;AAEA,SAAO;AAAA,IACL,WAAW;AAAA,IACX,WAAW;AAAA,IACX,eAAe,IAAI,aAAa,aAAa;AAAA,IAC7C,qBAAqB,YAAY,IAAI,IAAI,aAAa;AAAA,EACxD;AACF;","names":[]}
package/dist/inference/interruption/index.cjs
@@ -0,0 +1,34 @@
+ "use strict";
+ var __defProp = Object.defineProperty;
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+ var __getOwnPropNames = Object.getOwnPropertyNames;
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
+ var __export = (target, all) => {
+   for (var name in all)
+     __defProp(target, name, { get: all[name], enumerable: true });
+ };
+ var __copyProps = (to, from, except, desc) => {
+   if (from && typeof from === "object" || typeof from === "function") {
+     for (let key of __getOwnPropNames(from))
+       if (!__hasOwnProp.call(to, key) && key !== except)
+         __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+   }
+   return to;
+ };
+ var __reExport = (target, mod, secondTarget) => (__copyProps(target, mod, "default"), secondTarget && __copyProps(secondTarget, mod, "default"));
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+ var interruption_exports = {};
+ __export(interruption_exports, {
+   InterruptionStreamSentinel: () => import_InterruptionStream.InterruptionStreamSentinel
+ });
+ module.exports = __toCommonJS(interruption_exports);
+ __reExport(interruption_exports, require("./AdaptiveInterruptionDetector.cjs"), module.exports);
+ __reExport(interruption_exports, require("./interruption.cjs"), module.exports);
+ var import_InterruptionStream = require("./InterruptionStream.cjs");
+ // Annotate the CommonJS export names for ESM import in node:
+ 0 && (module.exports = {
+   InterruptionStreamSentinel,
+   ...require("./AdaptiveInterruptionDetector.cjs"),
+   ...require("./interruption.cjs")
+ });
+ //# sourceMappingURL=index.cjs.map
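index.cjs is the standard esbuild CommonJS shim: `__reExport` forwards the star exports at runtime, and the unreachable `0 && (module.exports = { ... })` block exists solely so Node's cjs-module-lexer can statically detect the named exports and surface them to ESM `import` statements. A consumer-side sketch, assuming the subpath is exposed through the package's "exports" map (the package.json hunk is not shown in this section):

```ts
// Assumed consumer view; subpath availability is an assumption, not confirmed here.
import {
  InterruptionEventType,      // re-exported from ./interruption.js
  InterruptionStreamSentinel, // explicit named export from ./InterruptionStream.js
} from '@livekit/agents/dist/inference/interruption/index.js';

console.log(InterruptionEventType.INTERRUPTION); // "interruption"
```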
package/dist/inference/interruption/index.cjs.map
@@ -0,0 +1 @@
+ {"version":3,"sources":["../../../src/inference/interruption/index.ts"],"sourcesContent":["export * from './AdaptiveInterruptionDetector.js';\nexport * from './interruption.js';\nexport { InterruptionStreamSentinel } from './InterruptionStream.js';\nexport type { InterruptionSentinel } from './InterruptionStream.js';\n"],"mappings":";;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,iCAAc,8CAAd;AACA,iCAAc,8BADd;AAEA,gCAA2C;","names":[]}
package/dist/inference/interruption/index.d.cts
@@ -0,0 +1,5 @@
+ export * from './AdaptiveInterruptionDetector.js';
+ export * from './interruption.js';
+ export { InterruptionStreamSentinel } from './InterruptionStream.js';
+ export type { InterruptionSentinel } from './InterruptionStream.js';
+ //# sourceMappingURL=index.d.ts.map
package/dist/inference/interruption/index.d.ts
@@ -0,0 +1,5 @@
+ export * from './AdaptiveInterruptionDetector.js';
+ export * from './interruption.js';
+ export { InterruptionStreamSentinel } from './InterruptionStream.js';
+ export type { InterruptionSentinel } from './InterruptionStream.js';
+ //# sourceMappingURL=index.d.ts.map
package/dist/inference/interruption/index.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/inference/interruption/index.ts"],"names":[],"mappings":"AAAA,cAAc,mCAAmC,CAAC;AAClD,cAAc,mBAAmB,CAAC;AAClC,OAAO,EAAE,0BAA0B,EAAE,MAAM,yBAAyB,CAAC;AACrE,YAAY,EAAE,oBAAoB,EAAE,MAAM,yBAAyB,CAAC"}
package/dist/inference/interruption/index.js
@@ -0,0 +1,7 @@
+ export * from "./AdaptiveInterruptionDetector.js";
+ export * from "./interruption.js";
+ import { InterruptionStreamSentinel } from "./InterruptionStream.js";
+ export {
+   InterruptionStreamSentinel
+ };
+ //# sourceMappingURL=index.js.map
package/dist/inference/interruption/index.js.map
@@ -0,0 +1 @@
+ {"version":3,"sources":["../../../src/inference/interruption/index.ts"],"sourcesContent":["export * from './AdaptiveInterruptionDetector.js';\nexport * from './interruption.js';\nexport { InterruptionStreamSentinel } from './InterruptionStream.js';\nexport type { InterruptionSentinel } from './InterruptionStream.js';\n"],"mappings":"AAAA,cAAc;AACd,cAAc;AACd,SAAS,kCAAkC;","names":[]}
package/dist/inference/interruption/interruption.cjs
@@ -0,0 +1,85 @@
+ "use strict";
+ var __defProp = Object.defineProperty;
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+ var __getOwnPropNames = Object.getOwnPropertyNames;
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
+ var __export = (target, all) => {
+   for (var name in all)
+     __defProp(target, name, { get: all[name], enumerable: true });
+ };
+ var __copyProps = (to, from, except, desc) => {
+   if (from && typeof from === "object" || typeof from === "function") {
+     for (let key of __getOwnPropNames(from))
+       if (!__hasOwnProp.call(to, key) && key !== except)
+         __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+   }
+   return to;
+ };
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+ var interruption_exports = {};
+ __export(interruption_exports, {
+   InterruptionCacheEntry: () => InterruptionCacheEntry,
+   InterruptionDetectionError: () => InterruptionDetectionError,
+   InterruptionEventType: () => InterruptionEventType
+ });
+ module.exports = __toCommonJS(interruption_exports);
+ var import_utils = require("../utils.cjs");
+ var import_defaults = require("./defaults.cjs");
+ var InterruptionEventType = /* @__PURE__ */ ((InterruptionEventType2) => {
+   InterruptionEventType2["INTERRUPTION"] = "interruption";
+   InterruptionEventType2["OVERLAP_SPEECH_ENDED"] = "overlap_speech_ended";
+   return InterruptionEventType2;
+ })(InterruptionEventType || {});
+ class InterruptionDetectionError extends Error {
+   type = "InterruptionDetectionError";
+   timestamp;
+   label;
+   recoverable;
+   constructor(message, timestamp, label, recoverable) {
+     super(message);
+     this.name = "InterruptionDetectionError";
+     this.timestamp = timestamp;
+     this.label = label;
+     this.recoverable = recoverable;
+   }
+   toString() {
+     return `${this.name}: ${this.message} (label=${this.label}, timestamp=${this.timestamp}, recoverable=${this.recoverable})`;
+   }
+ }
+ function estimateProbability(probabilities, windowSize = import_defaults.MIN_INTERRUPTION_DURATION) {
+   const minWindow = Math.ceil(windowSize / 0.025);
+   if (probabilities.length < minWindow) {
+     return 0;
+   }
+   return (0, import_utils.slidingWindowMinMax)(probabilities, windowSize);
+ }
+ class InterruptionCacheEntry {
+   createdAt;
+   totalDuration;
+   predictionDuration;
+   detectionDelay;
+   speechInput;
+   probabilities;
+   isInterruption;
+   probability;
+   constructor(params) {
+     this.createdAt = params.createdAt;
+     this.totalDuration = params.totalDuration ?? 0;
+     this.predictionDuration = params.predictionDuration ?? 0;
+     this.detectionDelay = params.detectionDelay ?? 0;
+     this.speechInput = params.speechInput;
+     this.probabilities = params.probabilities;
+     this.isInterruption = params.isInterruption;
+     this.probability = this.probabilities ? estimateProbability(this.probabilities) : 0;
+   }
+   static default() {
+     return new InterruptionCacheEntry({ createdAt: 0 });
+   }
+ }
+ // Annotate the CommonJS export names for ESM import in node:
+ 0 && (module.exports = {
+   InterruptionCacheEntry,
+   InterruptionDetectionError,
+   InterruptionEventType
+ });
+ //# sourceMappingURL=interruption.cjs.map
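`estimateProbability` treats each probability as one 25 ms frame (the `/ 0.025`, confirmed by the `// 25ms per frame` comment in the embedded source below) and returns 0 outright until a full window of frames has accumulated. A self-contained sketch of just that guard; the 0.2 s window is an invented stand-in, since `MIN_INTERRUPTION_DURATION` is defined in defaults.ts and its value is not shown in this diff:

```ts
const FRAME_DURATION_S = 0.025; // 25 ms per probability frame

// Invented stand-in for MIN_INTERRUPTION_DURATION (real value lives in defaults.ts).
const ASSUMED_WINDOW_S = 0.2;

function hasFullWindow(probabilities: Float32Array, windowSize = ASSUMED_WINDOW_S): boolean {
  // e.g. Math.ceil(0.2 / 0.025) = 8 frames required before anything is scored
  const minWindow = Math.ceil(windowSize / FRAME_DURATION_S);
  return probabilities.length >= minWindow;
}

console.log(hasFullWindow(new Float32Array(5)));  // false — estimateProbability returns 0
console.log(hasFullWindow(new Float32Array(12))); // true — slidingWindowMinMax is consulted
```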
package/dist/inference/interruption/interruption.cjs.map
@@ -0,0 +1 @@
+ {"version":3,"sources":["../../../src/inference/interruption/interruption.ts"],"sourcesContent":["import { slidingWindowMinMax } from '../utils.js';\nimport { MIN_INTERRUPTION_DURATION } from './defaults.js';\n\nexport enum InterruptionEventType {\n INTERRUPTION = 'interruption',\n OVERLAP_SPEECH_ENDED = 'overlap_speech_ended',\n}\nexport interface InterruptionEvent {\n type: InterruptionEventType;\n timestamp: number;\n isInterruption: boolean;\n totalDuration: number;\n predictionDuration: number;\n detectionDelay: number;\n overlapSpeechStartedAt?: number;\n speechInput?: Int16Array;\n probabilities?: Float32Array;\n probability: number;\n}\n\nexport class InterruptionDetectionError extends Error {\n readonly type = 'InterruptionDetectionError';\n\n readonly timestamp: number;\n readonly label: string;\n readonly recoverable: boolean;\n\n constructor(message: string, timestamp: number, label: string, recoverable: boolean) {\n super(message);\n this.name = 'InterruptionDetectionError';\n this.timestamp = timestamp;\n this.label = label;\n this.recoverable = recoverable;\n }\n\n toString(): string {\n return `${this.name}: ${this.message} (label=${this.label}, timestamp=${this.timestamp}, recoverable=${this.recoverable})`;\n }\n}\n\nfunction estimateProbability(\n probabilities: Float32Array,\n windowSize: number = MIN_INTERRUPTION_DURATION,\n): number {\n const minWindow = Math.ceil(windowSize / 0.025); // 25ms per frame\n if (probabilities.length < minWindow) {\n return 0;\n }\n\n return slidingWindowMinMax(probabilities, windowSize);\n}\n\n/**\n * Typed cache entry for interruption inference results.\n */\nexport class InterruptionCacheEntry {\n readonly createdAt: number;\n readonly totalDuration: number;\n readonly predictionDuration: number;\n readonly detectionDelay: number;\n readonly speechInput?: Int16Array;\n readonly probabilities?: Float32Array;\n readonly isInterruption?: boolean;\n readonly probability: number;\n\n constructor(params: {\n createdAt: number;\n speechInput?: Int16Array;\n totalDuration?: number;\n predictionDuration?: number;\n detectionDelay?: number;\n probabilities?: Float32Array;\n isInterruption?: boolean;\n }) {\n this.createdAt = params.createdAt;\n this.totalDuration = params.totalDuration ?? 0;\n this.predictionDuration = params.predictionDuration ?? 0;\n this.detectionDelay = params.detectionDelay ?? 0;\n this.speechInput = params.speechInput;\n this.probabilities = params.probabilities;\n this.isInterruption = params.isInterruption;\n this.probability = this.probabilities ? 
estimateProbability(this.probabilities) : 0;\n }\n\n static default(): InterruptionCacheEntry {\n return new InterruptionCacheEntry({ createdAt: 0 });\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mBAAoC;AACpC,sBAA0C;AAEnC,IAAK,wBAAL,kBAAKA,2BAAL;AACL,EAAAA,uBAAA,kBAAe;AACf,EAAAA,uBAAA,0BAAuB;AAFb,SAAAA;AAAA,GAAA;AAiBL,MAAM,mCAAmC,MAAM;AAAA,EAC3C,OAAO;AAAA,EAEP;AAAA,EACA;AAAA,EACA;AAAA,EAET,YAAY,SAAiB,WAAmB,OAAe,aAAsB;AACnF,UAAM,OAAO;AACb,SAAK,OAAO;AACZ,SAAK,YAAY;AACjB,SAAK,QAAQ;AACb,SAAK,cAAc;AAAA,EACrB;AAAA,EAEA,WAAmB;AACjB,WAAO,GAAG,KAAK,IAAI,KAAK,KAAK,OAAO,WAAW,KAAK,KAAK,eAAe,KAAK,SAAS,iBAAiB,KAAK,WAAW;AAAA,EACzH;AACF;AAEA,SAAS,oBACP,eACA,aAAqB,2CACb;AACR,QAAM,YAAY,KAAK,KAAK,aAAa,KAAK;AAC9C,MAAI,cAAc,SAAS,WAAW;AACpC,WAAO;AAAA,EACT;AAEA,aAAO,kCAAoB,eAAe,UAAU;AACtD;AAKO,MAAM,uBAAuB;AAAA,EACzB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAET,YAAY,QAQT;AACD,SAAK,YAAY,OAAO;AACxB,SAAK,gBAAgB,OAAO,iBAAiB;AAC7C,SAAK,qBAAqB,OAAO,sBAAsB;AACvD,SAAK,iBAAiB,OAAO,kBAAkB;AAC/C,SAAK,cAAc,OAAO;AAC1B,SAAK,gBAAgB,OAAO;AAC5B,SAAK,iBAAiB,OAAO;AAC7B,SAAK,cAAc,KAAK,gBAAgB,oBAAoB,KAAK,aAAa,IAAI;AAAA,EACpF;AAAA,EAEA,OAAO,UAAkC;AACvC,WAAO,IAAI,uBAAuB,EAAE,WAAW,EAAE,CAAC;AAAA,EACpD;AACF;","names":["InterruptionEventType"]}
package/dist/inference/interruption/interruption.d.cts
@@ -0,0 +1,48 @@
+ export declare enum InterruptionEventType {
+   INTERRUPTION = "interruption",
+   OVERLAP_SPEECH_ENDED = "overlap_speech_ended"
+ }
+ export interface InterruptionEvent {
+   type: InterruptionEventType;
+   timestamp: number;
+   isInterruption: boolean;
+   totalDuration: number;
+   predictionDuration: number;
+   detectionDelay: number;
+   overlapSpeechStartedAt?: number;
+   speechInput?: Int16Array;
+   probabilities?: Float32Array;
+   probability: number;
+ }
+ export declare class InterruptionDetectionError extends Error {
+   readonly type = "InterruptionDetectionError";
+   readonly timestamp: number;
+   readonly label: string;
+   readonly recoverable: boolean;
+   constructor(message: string, timestamp: number, label: string, recoverable: boolean);
+   toString(): string;
+ }
+ /**
+  * Typed cache entry for interruption inference results.
+  */
+ export declare class InterruptionCacheEntry {
+   readonly createdAt: number;
+   readonly totalDuration: number;
+   readonly predictionDuration: number;
+   readonly detectionDelay: number;
+   readonly speechInput?: Int16Array;
+   readonly probabilities?: Float32Array;
+   readonly isInterruption?: boolean;
+   readonly probability: number;
+   constructor(params: {
+     createdAt: number;
+     speechInput?: Int16Array;
+     totalDuration?: number;
+     predictionDuration?: number;
+     detectionDelay?: number;
+     probabilities?: Float32Array;
+     isInterruption?: boolean;
+   });
+   static default(): InterruptionCacheEntry;
+ }
+ //# sourceMappingURL=interruption.d.ts.map
package/dist/inference/interruption/interruption.d.ts
@@ -0,0 +1,48 @@
+ export declare enum InterruptionEventType {
+   INTERRUPTION = "interruption",
+   OVERLAP_SPEECH_ENDED = "overlap_speech_ended"
+ }
+ export interface InterruptionEvent {
+   type: InterruptionEventType;
+   timestamp: number;
+   isInterruption: boolean;
+   totalDuration: number;
+   predictionDuration: number;
+   detectionDelay: number;
+   overlapSpeechStartedAt?: number;
+   speechInput?: Int16Array;
+   probabilities?: Float32Array;
+   probability: number;
+ }
+ export declare class InterruptionDetectionError extends Error {
+   readonly type = "InterruptionDetectionError";
+   readonly timestamp: number;
+   readonly label: string;
+   readonly recoverable: boolean;
+   constructor(message: string, timestamp: number, label: string, recoverable: boolean);
+   toString(): string;
+ }
+ /**
+  * Typed cache entry for interruption inference results.
+  */
+ export declare class InterruptionCacheEntry {
+   readonly createdAt: number;
+   readonly totalDuration: number;
+   readonly predictionDuration: number;
+   readonly detectionDelay: number;
+   readonly speechInput?: Int16Array;
+   readonly probabilities?: Float32Array;
+   readonly isInterruption?: boolean;
+   readonly probability: number;
+   constructor(params: {
+     createdAt: number;
+     speechInput?: Int16Array;
+     totalDuration?: number;
+     predictionDuration?: number;
+     detectionDelay?: number;
+     probabilities?: Float32Array;
+     isInterruption?: boolean;
+   });
+   static default(): InterruptionCacheEntry;
+ }
+ //# sourceMappingURL=interruption.d.ts.map
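The declarations make the defaulting rules explicit: every optional duration collapses to 0 via `?? 0`, and `probability` is derived inside the constructor rather than passed in. A usage sketch with invented values; the import path assumes these types are reachable through the new interruption index, which this diff does not confirm:

```ts
// Values are invented; the import path is an assumption.
import { InterruptionCacheEntry } from '@livekit/agents/dist/inference/interruption/index.js';

const empty = InterruptionCacheEntry.default(); // createdAt: 0, everything else defaulted
console.log(empty.totalDuration, empty.probability); // 0 0

const entry = new InterruptionCacheEntry({
  createdAt: performance.now(),
  probabilities: new Float32Array([0.1, 0.4, 0.9, 0.8]), // invented frame scores
  isInterruption: true,
});
console.log(entry.detectionDelay); // 0, via the ?? 0 fallback
console.log(entry.probability);    // computed from `probabilities` by estimateProbability
```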
package/dist/inference/interruption/interruption.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"interruption.d.ts","sourceRoot":"","sources":["../../../src/inference/interruption/interruption.ts"],"names":[],"mappings":"AAGA,oBAAY,qBAAqB;IAC/B,YAAY,iBAAiB;IAC7B,oBAAoB,yBAAyB;CAC9C;AACD,MAAM,WAAW,iBAAiB;IAChC,IAAI,EAAE,qBAAqB,CAAC;IAC5B,SAAS,EAAE,MAAM,CAAC;IAClB,cAAc,EAAE,OAAO,CAAC;IACxB,aAAa,EAAE,MAAM,CAAC;IACtB,kBAAkB,EAAE,MAAM,CAAC;IAC3B,cAAc,EAAE,MAAM,CAAC;IACvB,sBAAsB,CAAC,EAAE,MAAM,CAAC;IAChC,WAAW,CAAC,EAAE,UAAU,CAAC;IACzB,aAAa,CAAC,EAAE,YAAY,CAAC;IAC7B,WAAW,EAAE,MAAM,CAAC;CACrB;AAED,qBAAa,0BAA2B,SAAQ,KAAK;IACnD,QAAQ,CAAC,IAAI,gCAAgC;IAE7C,QAAQ,CAAC,SAAS,EAAE,MAAM,CAAC;IAC3B,QAAQ,CAAC,KAAK,EAAE,MAAM,CAAC;IACvB,QAAQ,CAAC,WAAW,EAAE,OAAO,CAAC;gBAElB,OAAO,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,EAAE,WAAW,EAAE,OAAO;IAQnF,QAAQ,IAAI,MAAM;CAGnB;AAcD;;GAEG;AACH,qBAAa,sBAAsB;IACjC,QAAQ,CAAC,SAAS,EAAE,MAAM,CAAC;IAC3B,QAAQ,CAAC,aAAa,EAAE,MAAM,CAAC;IAC/B,QAAQ,CAAC,kBAAkB,EAAE,MAAM,CAAC;IACpC,QAAQ,CAAC,cAAc,EAAE,MAAM,CAAC;IAChC,QAAQ,CAAC,WAAW,CAAC,EAAE,UAAU,CAAC;IAClC,QAAQ,CAAC,aAAa,CAAC,EAAE,YAAY,CAAC;IACtC,QAAQ,CAAC,cAAc,CAAC,EAAE,OAAO,CAAC;IAClC,QAAQ,CAAC,WAAW,EAAE,MAAM,CAAC;gBAEjB,MAAM,EAAE;QAClB,SAAS,EAAE,MAAM,CAAC;QAClB,WAAW,CAAC,EAAE,UAAU,CAAC;QACzB,aAAa,CAAC,EAAE,MAAM,CAAC;QACvB,kBAAkB,CAAC,EAAE,MAAM,CAAC;QAC5B,cAAc,CAAC,EAAE,MAAM,CAAC;QACxB,aAAa,CAAC,EAAE,YAAY,CAAC;QAC7B,cAAc,CAAC,EAAE,OAAO,CAAC;KAC1B;IAWD,MAAM,CAAC,OAAO,IAAI,sBAAsB;CAGzC"}
package/dist/inference/interruption/interruption.js
@@ -0,0 +1,59 @@
+ import { slidingWindowMinMax } from "../utils.js";
+ import { MIN_INTERRUPTION_DURATION } from "./defaults.js";
+ var InterruptionEventType = /* @__PURE__ */ ((InterruptionEventType2) => {
+   InterruptionEventType2["INTERRUPTION"] = "interruption";
+   InterruptionEventType2["OVERLAP_SPEECH_ENDED"] = "overlap_speech_ended";
+   return InterruptionEventType2;
+ })(InterruptionEventType || {});
+ class InterruptionDetectionError extends Error {
+   type = "InterruptionDetectionError";
+   timestamp;
+   label;
+   recoverable;
+   constructor(message, timestamp, label, recoverable) {
+     super(message);
+     this.name = "InterruptionDetectionError";
+     this.timestamp = timestamp;
+     this.label = label;
+     this.recoverable = recoverable;
+   }
+   toString() {
+     return `${this.name}: ${this.message} (label=${this.label}, timestamp=${this.timestamp}, recoverable=${this.recoverable})`;
+   }
+ }
+ function estimateProbability(probabilities, windowSize = MIN_INTERRUPTION_DURATION) {
+   const minWindow = Math.ceil(windowSize / 0.025);
+   if (probabilities.length < minWindow) {
+     return 0;
+   }
+   return slidingWindowMinMax(probabilities, windowSize);
+ }
+ class InterruptionCacheEntry {
+   createdAt;
+   totalDuration;
+   predictionDuration;
+   detectionDelay;
+   speechInput;
+   probabilities;
+   isInterruption;
+   probability;
+   constructor(params) {
+     this.createdAt = params.createdAt;
+     this.totalDuration = params.totalDuration ?? 0;
+     this.predictionDuration = params.predictionDuration ?? 0;
+     this.detectionDelay = params.detectionDelay ?? 0;
+     this.speechInput = params.speechInput;
+     this.probabilities = params.probabilities;
+     this.isInterruption = params.isInterruption;
+     this.probability = this.probabilities ? estimateProbability(this.probabilities) : 0;
+   }
+   static default() {
+     return new InterruptionCacheEntry({ createdAt: 0 });
+   }
+ }
+ export {
+   InterruptionCacheEntry,
+   InterruptionDetectionError,
+   InterruptionEventType
+ };
+ //# sourceMappingURL=interruption.js.map
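`InterruptionDetectionError` carries a `recoverable` flag alongside the label and timestamp, and `toString` folds all three into the message. A sketch of how a caller might branch on that flag; the retry policy here is illustrative, not something the package prescribes:

```ts
// Illustrative error handling; the 'retry'/'give-up' policy is invented.
import { InterruptionDetectionError } from '@livekit/agents/dist/inference/interruption/interruption.js';

function handleDetectionFailure(err: unknown): 'retry' | 'give-up' {
  if (err instanceof InterruptionDetectionError) {
    // Prints "InterruptionDetectionError: <message> (label=..., timestamp=..., recoverable=...)"
    console.warn(err.toString());
    return err.recoverable ? 'retry' : 'give-up';
  }
  throw err; // not a detection error — let it propagate
}
```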
package/dist/inference/interruption/interruption.js.map
@@ -0,0 +1 @@
+ {"version":3,"sources":["../../../src/inference/interruption/interruption.ts"],"sourcesContent":["import { slidingWindowMinMax } from '../utils.js';\nimport { MIN_INTERRUPTION_DURATION } from './defaults.js';\n\nexport enum InterruptionEventType {\n INTERRUPTION = 'interruption',\n OVERLAP_SPEECH_ENDED = 'overlap_speech_ended',\n}\nexport interface InterruptionEvent {\n type: InterruptionEventType;\n timestamp: number;\n isInterruption: boolean;\n totalDuration: number;\n predictionDuration: number;\n detectionDelay: number;\n overlapSpeechStartedAt?: number;\n speechInput?: Int16Array;\n probabilities?: Float32Array;\n probability: number;\n}\n\nexport class InterruptionDetectionError extends Error {\n readonly type = 'InterruptionDetectionError';\n\n readonly timestamp: number;\n readonly label: string;\n readonly recoverable: boolean;\n\n constructor(message: string, timestamp: number, label: string, recoverable: boolean) {\n super(message);\n this.name = 'InterruptionDetectionError';\n this.timestamp = timestamp;\n this.label = label;\n this.recoverable = recoverable;\n }\n\n toString(): string {\n return `${this.name}: ${this.message} (label=${this.label}, timestamp=${this.timestamp}, recoverable=${this.recoverable})`;\n }\n}\n\nfunction estimateProbability(\n probabilities: Float32Array,\n windowSize: number = MIN_INTERRUPTION_DURATION,\n): number {\n const minWindow = Math.ceil(windowSize / 0.025); // 25ms per frame\n if (probabilities.length < minWindow) {\n return 0;\n }\n\n return slidingWindowMinMax(probabilities, windowSize);\n}\n\n/**\n * Typed cache entry for interruption inference results.\n */\nexport class InterruptionCacheEntry {\n readonly createdAt: number;\n readonly totalDuration: number;\n readonly predictionDuration: number;\n readonly detectionDelay: number;\n readonly speechInput?: Int16Array;\n readonly probabilities?: Float32Array;\n readonly isInterruption?: boolean;\n readonly probability: number;\n\n constructor(params: {\n createdAt: number;\n speechInput?: Int16Array;\n totalDuration?: number;\n predictionDuration?: number;\n detectionDelay?: number;\n probabilities?: Float32Array;\n isInterruption?: boolean;\n }) {\n this.createdAt = params.createdAt;\n this.totalDuration = params.totalDuration ?? 0;\n this.predictionDuration = params.predictionDuration ?? 0;\n this.detectionDelay = params.detectionDelay ?? 0;\n this.speechInput = params.speechInput;\n this.probabilities = params.probabilities;\n this.isInterruption = params.isInterruption;\n this.probability = this.probabilities ? 
estimateProbability(this.probabilities) : 0;\n }\n\n static default(): InterruptionCacheEntry {\n return new InterruptionCacheEntry({ createdAt: 0 });\n }\n}\n"],"mappings":"AAAA,SAAS,2BAA2B;AACpC,SAAS,iCAAiC;AAEnC,IAAK,wBAAL,kBAAKA,2BAAL;AACL,EAAAA,uBAAA,kBAAe;AACf,EAAAA,uBAAA,0BAAuB;AAFb,SAAAA;AAAA,GAAA;AAiBL,MAAM,mCAAmC,MAAM;AAAA,EAC3C,OAAO;AAAA,EAEP;AAAA,EACA;AAAA,EACA;AAAA,EAET,YAAY,SAAiB,WAAmB,OAAe,aAAsB;AACnF,UAAM,OAAO;AACb,SAAK,OAAO;AACZ,SAAK,YAAY;AACjB,SAAK,QAAQ;AACb,SAAK,cAAc;AAAA,EACrB;AAAA,EAEA,WAAmB;AACjB,WAAO,GAAG,KAAK,IAAI,KAAK,KAAK,OAAO,WAAW,KAAK,KAAK,eAAe,KAAK,SAAS,iBAAiB,KAAK,WAAW;AAAA,EACzH;AACF;AAEA,SAAS,oBACP,eACA,aAAqB,2BACb;AACR,QAAM,YAAY,KAAK,KAAK,aAAa,KAAK;AAC9C,MAAI,cAAc,SAAS,WAAW;AACpC,WAAO;AAAA,EACT;AAEA,SAAO,oBAAoB,eAAe,UAAU;AACtD;AAKO,MAAM,uBAAuB;AAAA,EACzB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAET,YAAY,QAQT;AACD,SAAK,YAAY,OAAO;AACxB,SAAK,gBAAgB,OAAO,iBAAiB;AAC7C,SAAK,qBAAqB,OAAO,sBAAsB;AACvD,SAAK,iBAAiB,OAAO,kBAAkB;AAC/C,SAAK,cAAc,OAAO;AAC1B,SAAK,gBAAgB,OAAO;AAC5B,SAAK,iBAAiB,OAAO;AAC7B,SAAK,cAAc,KAAK,gBAAgB,oBAAoB,KAAK,aAAa,IAAI;AAAA,EACpF;AAAA,EAEA,OAAO,UAAkC;AACvC,WAAO,IAAI,uBAAuB,EAAE,WAAW,EAAE,CAAC;AAAA,EACpD;AACF;","names":["InterruptionEventType"]}
package/dist/inference/llm.cjs
@@ -132,6 +132,7 @@ class LLMStream extends llm.LLMStream {
    toolIndex;
    fncName;
    fncRawArguments;
+   toolExtra;
    constructor(llm2, {
      model,
      provider,
@@ -157,6 +158,7 @@ class LLMStream extends llm.LLMStream {
      var _a;
      let retryable = true;
      this.toolCallId = this.fncName = this.fncRawArguments = this.toolIndex = void 0;
+     this.toolExtra = void 0;
      try {
        const messages = await this.chatCtx.toProviderFormat(
          this.providerFmt
@@ -265,12 +267,15 @@
          if (this.toolCallId && tool.id && tool.index !== this.toolIndex) {
            callChunk = this.createRunningToolCallChunk(id, delta);
            this.toolCallId = this.fncName = this.fncRawArguments = void 0;
+           this.toolExtra = void 0;
          }
          if (tool.function.name) {
            this.toolIndex = tool.index;
            this.toolCallId = tool.id;
            this.fncName = tool.function.name;
            this.fncRawArguments = tool.function.arguments || "";
+           this.toolExtra = // eslint-disable-next-line @typescript-eslint/no-explicit-any
+           tool.extra_content ?? void 0;
          } else if (tool.function.arguments) {
            this.fncRawArguments = (this.fncRawArguments || "") + tool.function.arguments;
          }
@@ -282,35 +287,57 @@
      if (choice.finish_reason && ["tool_calls", "stop"].includes(choice.finish_reason) && this.toolCallId !== void 0) {
        const callChunk = this.createRunningToolCallChunk(id, delta);
        this.toolCallId = this.fncName = this.fncRawArguments = void 0;
+       this.toolExtra = void 0;
        return callChunk;
      }
-     if (!delta.content) {
+     const deltaExtra = (
+       // eslint-disable-next-line @typescript-eslint/no-explicit-any
+       delta.extra_content ?? void 0
+     );
+     if (!delta.content && !deltaExtra) {
        return void 0;
      }
      return {
        id,
        delta: {
          role: "assistant",
-         content: delta.content
+         content: delta.content || void 0,
+         extra: deltaExtra
        }
      };
    }
    createRunningToolCallChunk(id, delta) {
+     const toolExtra = this.toolExtra ? { ...this.toolExtra } : {};
+     const thoughtSignature = this.extractThoughtSignature(toolExtra);
+     const deltaExtra = (
+       // eslint-disable-next-line @typescript-eslint/no-explicit-any
+       delta.extra_content ?? void 0
+     );
      return {
        id,
        delta: {
          role: "assistant",
          content: delta.content || void 0,
+         extra: deltaExtra,
          toolCalls: [
            llm.FunctionCall.create({
              callId: this.toolCallId || "",
              name: this.fncName || "",
-             args: this.fncRawArguments || ""
+             args: this.fncRawArguments || "",
+             extra: toolExtra,
+             thoughtSignature
            })
          ]
        }
      };
    }
+   extractThoughtSignature(extra) {
+     const googleExtra = extra == null ? void 0 : extra.google;
+     if (googleExtra && typeof googleExtra === "object") {
+       return googleExtra.thoughtSignature || googleExtra.thought_signature;
+     }
+     return void 0;
+   }
  }
  // Annotate the CommonJS export names for ESM import in node:
  0 && (module.exports = {
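The LLMStream changes thread provider-specific `extra_content` through both plain content chunks and tool calls, and `extractThoughtSignature` pulls a Google thought signature out of the tool's extra payload, accepting both the camelCase and snake_case spellings. A standalone sketch of that lookup with made-up payloads:

```ts
// Mirrors the compiled extractThoughtSignature above; the payloads are made up.
function extractThoughtSignature(
  extra: Record<string, unknown> | undefined,
): string | undefined {
  const googleExtra = extra?.google as Record<string, string | undefined> | undefined;
  if (googleExtra && typeof googleExtra === 'object') {
    return googleExtra.thoughtSignature || googleExtra.thought_signature;
  }
  return undefined;
}

console.log(extractThoughtSignature({ google: { thoughtSignature: 'sig_abc' } }));  // "sig_abc"
console.log(extractThoughtSignature({ google: { thought_signature: 'sig_abc' } })); // "sig_abc" (snake_case fallback)
console.log(extractThoughtSignature({ openai: {} }));                               // undefined
```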
package/dist/inference/llm.cjs.map
@@ -1 +1 @@
- {"version":3,"sources":["../../src/inference/llm.ts"],"sourcesContent":["// SPDX-FileCopyrightText: 2025 LiveKit, Inc.\n//\n// SPDX-License-Identifier: Apache-2.0\nimport OpenAI from 'openai';\nimport {\n APIConnectionError,\n APIStatusError,\n APITimeoutError,\n DEFAULT_API_CONNECT_OPTIONS,\n type Expand,\n toError,\n} from '../index.js';\nimport * as llm from '../llm/index.js';\nimport type { APIConnectOptions } from '../types.js';\nimport { type AnyString, createAccessToken } from './utils.js';\n\nconst DEFAULT_BASE_URL = 'https://agent-gateway.livekit.cloud/v1';\n\nexport type OpenAIModels =\n | 'openai/gpt-5'\n | 'openai/gpt-5-mini'\n | 'openai/gpt-5-nano'\n | 'openai/gpt-4.1'\n | 'openai/gpt-4.1-mini'\n | 'openai/gpt-4.1-nano'\n | 'openai/gpt-4o'\n | 'openai/gpt-4o-mini'\n | 'openai/gpt-oss-120b';\n\nexport type GoogleModels = 'google/gemini-2.0-flash-lite';\n\nexport type QwenModels = 'qwen/qwen3-235b-a22b-instruct';\n\nexport type KimiModels = 'moonshotai/kimi-k2-instruct';\n\nexport type DeepSeekModels = 'deepseek-ai/deepseek-v3';\n\ntype ChatCompletionPredictionContentParam =\n Expand<OpenAI.Chat.Completions.ChatCompletionPredictionContent>;\ntype WebSearchOptions = Expand<OpenAI.Chat.Completions.ChatCompletionCreateParams.WebSearchOptions>;\ntype ToolChoice = Expand<OpenAI.Chat.Completions.ChatCompletionCreateParams['tool_choice']>;\ntype Verbosity = 'low' | 'medium' | 'high';\n\nexport interface ChatCompletionOptions extends Record<string, unknown> {\n frequency_penalty?: number;\n logit_bias?: Record<string, number>;\n logprobs?: boolean;\n max_completion_tokens?: number;\n max_tokens?: number;\n metadata?: Record<string, string>;\n modalities?: Array<'text' | 'audio'>;\n n?: number;\n parallel_tool_calls?: boolean;\n prediction?: ChatCompletionPredictionContentParam | null;\n presence_penalty?: number;\n prompt_cache_key?: string;\n reasoning_effort?: 'minimal' | 'low' | 'medium' | 'high';\n safety_identifier?: string;\n seed?: number;\n service_tier?: 'auto' | 'default' | 'flex' | 'scale' | 'priority';\n stop?: string | string[];\n store?: boolean;\n temperature?: number;\n top_logprobs?: number;\n top_p?: number;\n user?: string;\n verbosity?: Verbosity;\n web_search_options?: WebSearchOptions;\n\n // livekit-typed arguments\n tool_choice?: ToolChoice;\n // TODO(brian): support response format\n // response_format?: OpenAI.Chat.Completions.ChatCompletionCreateParams['response_format']\n}\n\nexport type LLMModels =\n | OpenAIModels\n | GoogleModels\n | QwenModels\n | KimiModels\n | DeepSeekModels\n | AnyString;\n\nexport interface InferenceLLMOptions {\n model: LLMModels;\n provider?: string;\n baseURL: string;\n apiKey: string;\n apiSecret: string;\n modelOptions: ChatCompletionOptions;\n strictToolSchema?: boolean;\n}\n\nexport interface GatewayOptions {\n apiKey: string;\n apiSecret: string;\n}\n\n/**\n * Livekit Cloud Inference LLM\n */\nexport class LLM extends llm.LLM {\n private client: OpenAI;\n private opts: InferenceLLMOptions;\n\n constructor(opts: {\n model: LLMModels;\n provider?: string;\n baseURL?: string;\n apiKey?: string;\n apiSecret?: string;\n modelOptions?: InferenceLLMOptions['modelOptions'];\n strictToolSchema?: boolean;\n }) {\n super();\n\n const {\n model,\n provider,\n baseURL,\n apiKey,\n apiSecret,\n modelOptions,\n strictToolSchema = false,\n } = opts;\n\n const lkBaseURL = baseURL || process.env.LIVEKIT_INFERENCE_URL || DEFAULT_BASE_URL;\n const lkApiKey = apiKey || process.env.LIVEKIT_INFERENCE_API_KEY || process.env.LIVEKIT_API_KEY;\n if 
(!lkApiKey) {\n throw new Error('apiKey is required: pass apiKey or set LIVEKIT_API_KEY');\n }\n\n const lkApiSecret =\n apiSecret || process.env.LIVEKIT_INFERENCE_API_SECRET || process.env.LIVEKIT_API_SECRET;\n if (!lkApiSecret) {\n throw new Error('apiSecret is required: pass apiSecret or set LIVEKIT_API_SECRET');\n }\n\n this.opts = {\n model,\n provider,\n baseURL: lkBaseURL,\n apiKey: lkApiKey,\n apiSecret: lkApiSecret,\n modelOptions: modelOptions || {},\n strictToolSchema,\n };\n\n this.client = new OpenAI({\n baseURL: this.opts.baseURL,\n apiKey: '', // leave a temporary empty string to avoid OpenAI complain about missing key\n });\n }\n\n label(): string {\n return 'inference.LLM';\n }\n\n get model(): string {\n return this.opts.model;\n }\n\n static fromModelString(modelString: string): LLM {\n return new LLM({ model: modelString });\n }\n\n chat({\n chatCtx,\n toolCtx,\n connOptions = DEFAULT_API_CONNECT_OPTIONS,\n parallelToolCalls,\n toolChoice,\n // TODO(AJS-270): Add response_format parameter support\n extraKwargs,\n }: {\n chatCtx: llm.ChatContext;\n toolCtx?: llm.ToolContext;\n connOptions?: APIConnectOptions;\n parallelToolCalls?: boolean;\n toolChoice?: llm.ToolChoice;\n // TODO(AJS-270): Add responseFormat parameter\n extraKwargs?: Record<string, unknown>;\n }): LLMStream {\n let modelOptions: Record<string, unknown> = { ...(extraKwargs || {}) };\n\n parallelToolCalls =\n parallelToolCalls !== undefined\n ? parallelToolCalls\n : this.opts.modelOptions.parallel_tool_calls;\n\n if (toolCtx && Object.keys(toolCtx).length > 0 && parallelToolCalls !== undefined) {\n modelOptions.parallel_tool_calls = parallelToolCalls;\n }\n\n toolChoice =\n toolChoice !== undefined\n ? toolChoice\n : (this.opts.modelOptions.tool_choice as llm.ToolChoice | undefined);\n\n if (toolChoice) {\n modelOptions.tool_choice = toolChoice as ToolChoice;\n }\n\n // TODO(AJS-270): Add response_format support here\n\n modelOptions = { ...modelOptions, ...this.opts.modelOptions };\n\n return new LLMStream(this, {\n model: this.opts.model,\n provider: this.opts.provider,\n client: this.client,\n chatCtx,\n toolCtx,\n connOptions,\n modelOptions,\n strictToolSchema: this.opts.strictToolSchema ?? 
false, // default to false if not set\n gatewayOptions: {\n apiKey: this.opts.apiKey,\n apiSecret: this.opts.apiSecret,\n },\n });\n }\n}\n\nexport class LLMStream extends llm.LLMStream {\n private model: LLMModels;\n private provider?: string;\n private providerFmt: llm.ProviderFormat;\n private client: OpenAI;\n private modelOptions: Record<string, unknown>;\n private strictToolSchema: boolean;\n\n private gatewayOptions?: GatewayOptions;\n private toolCallId?: string;\n private toolIndex?: number;\n private fncName?: string;\n private fncRawArguments?: string;\n\n constructor(\n llm: LLM,\n {\n model,\n provider,\n client,\n chatCtx,\n toolCtx,\n gatewayOptions,\n connOptions,\n modelOptions,\n providerFmt,\n strictToolSchema,\n }: {\n model: LLMModels;\n provider?: string;\n client: OpenAI;\n chatCtx: llm.ChatContext;\n toolCtx?: llm.ToolContext;\n gatewayOptions?: GatewayOptions;\n connOptions: APIConnectOptions;\n modelOptions: Record<string, unknown>;\n providerFmt?: llm.ProviderFormat;\n strictToolSchema: boolean;\n },\n ) {\n super(llm, { chatCtx, toolCtx, connOptions });\n this.client = client;\n this.gatewayOptions = gatewayOptions;\n this.provider = provider;\n this.providerFmt = providerFmt || 'openai';\n this.modelOptions = modelOptions;\n this.model = model;\n this.strictToolSchema = strictToolSchema;\n }\n\n protected async run(): Promise<void> {\n // current function call that we're waiting for full completion (args are streamed)\n // (defined inside the run method to make sure the state is reset for each run/attempt)\n let retryable = true;\n this.toolCallId = this.fncName = this.fncRawArguments = this.toolIndex = undefined;\n\n try {\n const messages = (await this.chatCtx.toProviderFormat(\n this.providerFmt,\n )) as OpenAI.ChatCompletionMessageParam[];\n\n const tools = this.toolCtx\n ? Object.entries(this.toolCtx).map(([name, func]) => {\n const oaiParams = {\n type: 'function' as const,\n function: {\n name,\n description: func.description,\n parameters: llm.toJsonSchema(\n func.parameters,\n true,\n this.strictToolSchema,\n ) as unknown as OpenAI.Chat.Completions.ChatCompletionFunctionTool['function']['parameters'],\n } as OpenAI.Chat.Completions.ChatCompletionFunctionTool['function'],\n };\n\n if (this.strictToolSchema) {\n oaiParams.function.strict = true;\n }\n\n return oaiParams;\n })\n : undefined;\n\n const requestOptions: Record<string, unknown> = { ...this.modelOptions };\n if (!tools) {\n delete requestOptions.tool_choice;\n }\n\n // Dynamically set the access token for the LiveKit Agent Gateway API\n if (this.gatewayOptions) {\n this.client.apiKey = await createAccessToken(\n this.gatewayOptions.apiKey,\n this.gatewayOptions.apiSecret,\n );\n }\n\n if (this.provider) {\n const extraHeaders = requestOptions.extra_headers\n ? 
(requestOptions.extra_headers as Record<string, string>)\n : {};\n extraHeaders['X-LiveKit-Inference-Provider'] = this.provider;\n requestOptions.extra_headers = extraHeaders;\n }\n\n const stream = await this.client.chat.completions.create(\n {\n model: this.model,\n messages,\n tools,\n stream: true,\n stream_options: { include_usage: true },\n ...requestOptions,\n },\n {\n timeout: this.connOptions.timeoutMs,\n },\n );\n\n for await (const chunk of stream) {\n for (const choice of chunk.choices) {\n if (this.abortController.signal.aborted) {\n break;\n }\n const chatChunk = this.parseChoice(chunk.id, choice);\n if (chatChunk) {\n retryable = false;\n this.queue.put(chatChunk);\n }\n }\n\n if (chunk.usage) {\n const usage = chunk.usage;\n retryable = false;\n this.queue.put({\n id: chunk.id,\n usage: {\n completionTokens: usage.completion_tokens,\n promptTokens: usage.prompt_tokens,\n promptCachedTokens: usage.prompt_tokens_details?.cached_tokens || 0,\n totalTokens: usage.total_tokens,\n },\n });\n }\n }\n } catch (error) {\n if (error instanceof OpenAI.APIConnectionTimeoutError) {\n throw new APITimeoutError({ options: { retryable } });\n } else if (error instanceof OpenAI.APIError) {\n throw new APIStatusError({\n message: error.message,\n options: {\n statusCode: error.status,\n body: error.error,\n requestId: error.requestID,\n retryable,\n },\n });\n } else {\n throw new APIConnectionError({\n message: toError(error).message,\n options: { retryable },\n });\n }\n }\n }\n\n private parseChoice(\n id: string,\n choice: OpenAI.ChatCompletionChunk.Choice,\n ): llm.ChatChunk | undefined {\n const delta = choice.delta;\n\n // https://github.com/livekit/agents/issues/688\n // the delta can be None when using Azure OpenAI (content filtering)\n if (delta === undefined) return undefined;\n\n if (delta.tool_calls) {\n // check if we have functions to calls\n for (const tool of delta.tool_calls) {\n if (!tool.function) {\n continue; // oai may add other tools in the future\n }\n\n /**\n * The way OpenAI streams tool calls is a bit tricky.\n *\n * For any new tool call, it first emits a delta tool call with id, and function name,\n * the rest of the delta chunks will only stream the remaining arguments string,\n * until a new tool call is started or the tool call is finished.\n * See below for an example.\n *\n * Choice(delta=ChoiceDelta(content=None, function_call=None, refusal=None, role='assistant', tool_calls=None), finish_reason=None, index=0, logprobs=None)\n * [ChoiceDeltaToolCall(index=0, id='call_LaVeHWUHpef9K1sd5UO8TtLg', function=ChoiceDeltaToolCallFunction(arguments='', name='get_weather'), type='function')]\n * [ChoiceDeltaToolCall(index=0, id=None, function=ChoiceDeltaToolCallFunction(arguments='\\{\"location\": \"P', name=None), type=None)]\n * [ChoiceDeltaToolCall(index=0, id=None, function=ChoiceDeltaToolCallFunction(arguments='aris\\}', name=None), type=None)]\n * [ChoiceDeltaToolCall(index=1, id='call_ThU4OmMdQXnnVmpXGOCknXIB', function=ChoiceDeltaToolCallFunction(arguments='', name='get_weather'), type='function')]\n * [ChoiceDeltaToolCall(index=1, id=None, function=ChoiceDeltaToolCallFunction(arguments='\\{\"location\": \"T', name=None), type=None)]\n * [ChoiceDeltaToolCall(index=1, id=None, function=ChoiceDeltaToolCallFunction(arguments='okyo', name=None), type=None)]\n * Choice(delta=ChoiceDelta(content=None, function_call=None, refusal=None, role=None, tool_calls=None), finish_reason='tool_calls', index=0, logprobs=None)\n */\n let callChunk: llm.ChatChunk | undefined;\n 
// If we have a previous tool call and this is a new one, emit the previous\n if (this.toolCallId && tool.id && tool.index !== this.toolIndex) {\n callChunk = this.createRunningToolCallChunk(id, delta);\n this.toolCallId = this.fncName = this.fncRawArguments = undefined;\n }\n\n // Start or continue building the current tool call\n if (tool.function.name) {\n this.toolIndex = tool.index;\n this.toolCallId = tool.id;\n this.fncName = tool.function.name;\n this.fncRawArguments = tool.function.arguments || '';\n } else if (tool.function.arguments) {\n this.fncRawArguments = (this.fncRawArguments || '') + tool.function.arguments;\n }\n\n if (callChunk) {\n return callChunk;\n }\n }\n }\n\n // If we're done with tool calls, emit the final one\n if (\n choice.finish_reason &&\n ['tool_calls', 'stop'].includes(choice.finish_reason) &&\n this.toolCallId !== undefined\n ) {\n const callChunk = this.createRunningToolCallChunk(id, delta);\n this.toolCallId = this.fncName = this.fncRawArguments = undefined;\n return callChunk;\n }\n\n // Regular content message\n if (!delta.content) {\n return undefined;\n }\n\n return {\n id,\n delta: {\n role: 'assistant',\n content: delta.content,\n },\n };\n }\n\n private createRunningToolCallChunk(\n id: string,\n delta: OpenAI.Chat.Completions.ChatCompletionChunk.Choice.Delta,\n ): llm.ChatChunk {\n return {\n id,\n delta: {\n role: 'assistant',\n content: delta.content || undefined,\n toolCalls: [\n llm.FunctionCall.create({\n callId: this.toolCallId || '',\n name: this.fncName || '',\n args: this.fncRawArguments || '',\n }),\n ],\n },\n };\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAGA,oBAAmB;AACnB,eAOO;AACP,UAAqB;AAErB,mBAAkD;AAElD,MAAM,mBAAmB;AAqFlB,MAAM,YAAY,IAAI,IAAI;AAAA,EACvB;AAAA,EACA;AAAA,EAER,YAAY,MAQT;AACD,UAAM;AAEN,UAAM;AAAA,MACJ;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,mBAAmB;AAAA,IACrB,IAAI;AAEJ,UAAM,YAAY,WAAW,QAAQ,IAAI,yBAAyB;AAClE,UAAM,WAAW,UAAU,QAAQ,IAAI,6BAA6B,QAAQ,IAAI;AAChF,QAAI,CAAC,UAAU;AACb,YAAM,IAAI,MAAM,wDAAwD;AAAA,IAC1E;AAEA,UAAM,cACJ,aAAa,QAAQ,IAAI,gCAAgC,QAAQ,IAAI;AACvE,QAAI,CAAC,aAAa;AAChB,YAAM,IAAI,MAAM,iEAAiE;AAAA,IACnF;AAEA,SAAK,OAAO;AAAA,MACV;AAAA,MACA;AAAA,MACA,SAAS;AAAA,MACT,QAAQ;AAAA,MACR,WAAW;AAAA,MACX,cAAc,gBAAgB,CAAC;AAAA,MAC/B;AAAA,IACF;AAEA,SAAK,SAAS,IAAI,cAAAA,QAAO;AAAA,MACvB,SAAS,KAAK,KAAK;AAAA,MACnB,QAAQ;AAAA;AAAA,IACV,CAAC;AAAA,EACH;AAAA,EAEA,QAAgB;AACd,WAAO;AAAA,EACT;AAAA,EAEA,IAAI,QAAgB;AAClB,WAAO,KAAK,KAAK;AAAA,EACnB;AAAA,EAEA,OAAO,gBAAgB,aAA0B;AAC/C,WAAO,IAAI,IAAI,EAAE,OAAO,YAAY,CAAC;AAAA,EACvC;AAAA,EAEA,KAAK;AAAA,IACH;AAAA,IACA;AAAA,IACA,cAAc;AAAA,IACd;AAAA,IACA;AAAA;AAAA,IAEA;AAAA,EACF,GAQc;AACZ,QAAI,eAAwC,EAAE,GAAI,eAAe,CAAC,EAAG;AAErE,wBACE,sBAAsB,SAClB,oBACA,KAAK,KAAK,aAAa;AAE7B,QAAI,WAAW,OAAO,KAAK,OAAO,EAAE,SAAS,KAAK,sBAAsB,QAAW;AACjF,mBAAa,sBAAsB;AAAA,IACrC;AAEA,iBACE,eAAe,SACX,aACC,KAAK,KAAK,aAAa;AAE9B,QAAI,YAAY;AACd,mBAAa,cAAc;AAAA,IAC7B;AAIA,mBAAe,EAAE,GAAG,cAAc,GAAG,KAAK,KAAK,aAAa;AAE5D,WAAO,IAAI,UAAU,MAAM;AAAA,MACzB,OAAO,KAAK,KAAK;AAAA,MACjB,UAAU,KAAK,KAAK;AAAA,MACpB,QAAQ,KAAK;AAAA,MACb;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,kBAAkB,KAAK,KAAK,oBAAoB;AAAA;AAAA,MAChD,gBAAgB;AAAA,QACd,QAAQ,KAAK,KAAK;AAAA,QAClB,WAAW,KAAK,KAAK;AAAA,MACvB;AAAA,IACF,CAAC;AAAA,EACH;AACF;AAEO,MAAM,kBAAkB,IAAI,UAAU;AAAA,EACnC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAER,YACEC,MACA;AAAA,IACE;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAYA;AACA,UAAMA,MAA
K,EAAE,SAAS,SAAS,YAAY,CAAC;AAC5C,SAAK,SAAS;AACd,SAAK,iBAAiB;AACtB,SAAK,WAAW;AAChB,SAAK,cAAc,eAAe;AAClC,SAAK,eAAe;AACpB,SAAK,QAAQ;AACb,SAAK,mBAAmB;AAAA,EAC1B;AAAA,EAEA,MAAgB,MAAqB;AAlRvC;AAqRI,QAAI,YAAY;AAChB,SAAK,aAAa,KAAK,UAAU,KAAK,kBAAkB,KAAK,YAAY;AAEzE,QAAI;AACF,YAAM,WAAY,MAAM,KAAK,QAAQ;AAAA,QACnC,KAAK;AAAA,MACP;AAEA,YAAM,QAAQ,KAAK,UACf,OAAO,QAAQ,KAAK,OAAO,EAAE,IAAI,CAAC,CAAC,MAAM,IAAI,MAAM;AACjD,cAAM,YAAY;AAAA,UAChB,MAAM;AAAA,UACN,UAAU;AAAA,YACR;AAAA,YACA,aAAa,KAAK;AAAA,YAClB,YAAY,IAAI;AAAA,cACd,KAAK;AAAA,cACL;AAAA,cACA,KAAK;AAAA,YACP;AAAA,UACF;AAAA,QACF;AAEA,YAAI,KAAK,kBAAkB;AACzB,oBAAU,SAAS,SAAS;AAAA,QAC9B;AAEA,eAAO;AAAA,MACT,CAAC,IACD;AAEJ,YAAM,iBAA0C,EAAE,GAAG,KAAK,aAAa;AACvE,UAAI,CAAC,OAAO;AACV,eAAO,eAAe;AAAA,MACxB;AAGA,UAAI,KAAK,gBAAgB;AACvB,aAAK,OAAO,SAAS,UAAM;AAAA,UACzB,KAAK,eAAe;AAAA,UACpB,KAAK,eAAe;AAAA,QACtB;AAAA,MACF;AAEA,UAAI,KAAK,UAAU;AACjB,cAAM,eAAe,eAAe,gBAC/B,eAAe,gBAChB,CAAC;AACL,qBAAa,8BAA8B,IAAI,KAAK;AACpD,uBAAe,gBAAgB;AAAA,MACjC;AAEA,YAAM,SAAS,MAAM,KAAK,OAAO,KAAK,YAAY;AAAA,QAChD;AAAA,UACE,OAAO,KAAK;AAAA,UACZ;AAAA,UACA;AAAA,UACA,QAAQ;AAAA,UACR,gBAAgB,EAAE,eAAe,KAAK;AAAA,UACtC,GAAG;AAAA,QACL;AAAA,QACA;AAAA,UACE,SAAS,KAAK,YAAY;AAAA,QAC5B;AAAA,MACF;AAEA,uBAAiB,SAAS,QAAQ;AAChC,mBAAW,UAAU,MAAM,SAAS;AAClC,cAAI,KAAK,gBAAgB,OAAO,SAAS;AACvC;AAAA,UACF;AACA,gBAAM,YAAY,KAAK,YAAY,MAAM,IAAI,MAAM;AACnD,cAAI,WAAW;AACb,wBAAY;AACZ,iBAAK,MAAM,IAAI,SAAS;AAAA,UAC1B;AAAA,QACF;AAEA,YAAI,MAAM,OAAO;AACf,gBAAM,QAAQ,MAAM;AACpB,sBAAY;AACZ,eAAK,MAAM,IAAI;AAAA,YACb,IAAI,MAAM;AAAA,YACV,OAAO;AAAA,cACL,kBAAkB,MAAM;AAAA,cACxB,cAAc,MAAM;AAAA,cACpB,sBAAoB,WAAM,0BAAN,mBAA6B,kBAAiB;AAAA,cAClE,aAAa,MAAM;AAAA,YACrB;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AAAA,IACF,SAAS,OAAO;AACd,UAAI,iBAAiB,cAAAD,QAAO,2BAA2B;AACrD,cAAM,IAAI,yBAAgB,EAAE,SAAS,EAAE,UAAU,EAAE,CAAC;AAAA,MACtD,WAAW,iBAAiB,cAAAA,QAAO,UAAU;AAC3C,cAAM,IAAI,wBAAe;AAAA,UACvB,SAAS,MAAM;AAAA,UACf,SAAS;AAAA,YACP,YAAY,MAAM;AAAA,YAClB,MAAM,MAAM;AAAA,YACZ,WAAW,MAAM;AAAA,YACjB;AAAA,UACF;AAAA,QACF,CAAC;AAAA,MACH,OAAO;AACL,cAAM,IAAI,4BAAmB;AAAA,UAC3B,aAAS,kBAAQ,KAAK,EAAE;AAAA,UACxB,SAAS,EAAE,UAAU;AAAA,QACvB,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AAAA,EAEQ,YACN,IACA,QAC2B;AAC3B,UAAM,QAAQ,OAAO;AAIrB,QAAI,UAAU,OAAW,QAAO;AAEhC,QAAI,MAAM,YAAY;AAEpB,iBAAW,QAAQ,MAAM,YAAY;AACnC,YAAI,CAAC,KAAK,UAAU;AAClB;AAAA,QACF;AAmBA,YAAI;AAEJ,YAAI,KAAK,cAAc,KAAK,MAAM,KAAK,UAAU,KAAK,WAAW;AAC/D,sBAAY,KAAK,2BAA2B,IAAI,KAAK;AACrD,eAAK,aAAa,KAAK,UAAU,KAAK,kBAAkB;AAAA,QAC1D;AAGA,YAAI,KAAK,SAAS,MAAM;AACtB,eAAK,YAAY,KAAK;AACtB,eAAK,aAAa,KAAK;AACvB,eAAK,UAAU,KAAK,SAAS;AAC7B,eAAK,kBAAkB,KAAK,SAAS,aAAa;AAAA,QACpD,WAAW,KAAK,SAAS,WAAW;AAClC,eAAK,mBAAmB,KAAK,mBAAmB,MAAM,KAAK,SAAS;AAAA,QACtE;AAEA,YAAI,WAAW;AACb,iBAAO;AAAA,QACT;AAAA,MACF;AAAA,IACF;AAGA,QACE,OAAO,iBACP,CAAC,cAAc,MAAM,EAAE,SAAS,OAAO,aAAa,KACpD,KAAK,eAAe,QACpB;AACA,YAAM,YAAY,KAAK,2BAA2B,IAAI,KAAK;AAC3D,WAAK,aAAa,KAAK,UAAU,KAAK,kBAAkB;AACxD,aAAO;AAAA,IACT;AAGA,QAAI,CAAC,MAAM,SAAS;AAClB,aAAO;AAAA,IACT;AAEA,WAAO;AAAA,MACL;AAAA,MACA,OAAO;AAAA,QACL,MAAM;AAAA,QACN,SAAS,MAAM;AAAA,MACjB;AAAA,IACF;AAAA,EACF;AAAA,EAEQ,2BACN,IACA,OACe;AACf,WAAO;AAAA,MACL;AAAA,MACA,OAAO;AAAA,QACL,MAAM;AAAA,QACN,SAAS,MAAM,WAAW;AAAA,QAC1B,WAAW;AAAA,UACT,IAAI,aAAa,OAAO;AAAA,YACtB,QAAQ,KAAK,cAAc;AAAA,YAC3B,MAAM,KAAK,WAAW;AAAA,YACtB,MAAM,KAAK,mBAAmB;AAAA,UAChC,CAAC;AAAA,QACH;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;","names":["OpenAI","llm"]}
+ {"version":3,"sources":["../../src/inference/llm.ts"],"sourcesContent":["// SPDX-FileCopyrightText: 2025 LiveKit, Inc.\n//\n// SPDX-License-Identifier: Apache-2.0\nimport OpenAI from 'openai';\nimport {\n APIConnectionError,\n APIStatusError,\n APITimeoutError,\n DEFAULT_API_CONNECT_OPTIONS,\n type Expand,\n toError,\n} from '../index.js';\nimport * as llm from '../llm/index.js';\nimport type { APIConnectOptions } from '../types.js';\nimport { type AnyString, createAccessToken } from './utils.js';\n\nconst DEFAULT_BASE_URL = 'https://agent-gateway.livekit.cloud/v1';\n\nexport type OpenAIModels =\n | 'openai/gpt-5'\n | 'openai/gpt-5-mini'\n | 'openai/gpt-5-nano'\n | 'openai/gpt-4.1'\n | 'openai/gpt-4.1-mini'\n | 'openai/gpt-4.1-nano'\n | 'openai/gpt-4o'\n | 'openai/gpt-4o-mini'\n | 'openai/gpt-oss-120b';\n\nexport type GoogleModels =\n | 'google/gemini-3-pro-preview'\n | 'google/gemini-3-flash-preview'\n | 'google/gemini-2.5-pro'\n | 'google/gemini-2.5-flash'\n | 'google/gemini-2.5-flash-lite'\n | 'google/gemini-2.0-flash'\n | 'google/gemini-2.0-flash-lite';\n\nexport type QwenModels = 'qwen/qwen3-235b-a22b-instruct';\n\nexport type KimiModels = 'moonshotai/kimi-k2-instruct';\n\nexport type DeepSeekModels = 'deepseek-ai/deepseek-v3';\n\ntype ChatCompletionPredictionContentParam =\n Expand<OpenAI.Chat.Completions.ChatCompletionPredictionContent>;\ntype WebSearchOptions = Expand<OpenAI.Chat.Completions.ChatCompletionCreateParams.WebSearchOptions>;\ntype ToolChoice = Expand<OpenAI.Chat.Completions.ChatCompletionCreateParams['tool_choice']>;\ntype Verbosity = 'low' | 'medium' | 'high';\n\nexport interface ChatCompletionOptions extends Record<string, unknown> {\n frequency_penalty?: number;\n logit_bias?: Record<string, number>;\n logprobs?: boolean;\n max_completion_tokens?: number;\n max_tokens?: number;\n metadata?: Record<string, string>;\n modalities?: Array<'text' | 'audio'>;\n n?: number;\n parallel_tool_calls?: boolean;\n prediction?: ChatCompletionPredictionContentParam | null;\n presence_penalty?: number;\n prompt_cache_key?: string;\n reasoning_effort?: 'minimal' | 'low' | 'medium' | 'high';\n safety_identifier?: string;\n seed?: number;\n service_tier?: 'auto' | 'default' | 'flex' | 'scale' | 'priority';\n stop?: string | string[];\n store?: boolean;\n temperature?: number;\n top_logprobs?: number;\n top_p?: number;\n user?: string;\n verbosity?: Verbosity;\n web_search_options?: WebSearchOptions;\n\n // livekit-typed arguments\n tool_choice?: ToolChoice;\n // TODO(brian): support response format\n // response_format?: OpenAI.Chat.Completions.ChatCompletionCreateParams['response_format']\n}\n\nexport type LLMModels =\n | OpenAIModels\n | GoogleModels\n | QwenModels\n | KimiModels\n | DeepSeekModels\n | AnyString;\n\nexport interface InferenceLLMOptions {\n model: LLMModels;\n provider?: string;\n baseURL: string;\n apiKey: string;\n apiSecret: string;\n modelOptions: ChatCompletionOptions;\n strictToolSchema?: boolean;\n}\n\nexport interface GatewayOptions {\n apiKey: string;\n apiSecret: string;\n}\n\n/**\n * Livekit Cloud Inference LLM\n */\nexport class LLM extends llm.LLM {\n private client: OpenAI;\n private opts: InferenceLLMOptions;\n\n constructor(opts: {\n model: LLMModels;\n provider?: string;\n baseURL?: string;\n apiKey?: string;\n apiSecret?: string;\n modelOptions?: InferenceLLMOptions['modelOptions'];\n strictToolSchema?: boolean;\n }) {\n super();\n\n const {\n model,\n provider,\n baseURL,\n apiKey,\n apiSecret,\n modelOptions,\n strictToolSchema = false,\n } = 
opts;\n\n const lkBaseURL = baseURL || process.env.LIVEKIT_INFERENCE_URL || DEFAULT_BASE_URL;\n const lkApiKey = apiKey || process.env.LIVEKIT_INFERENCE_API_KEY || process.env.LIVEKIT_API_KEY;\n if (!lkApiKey) {\n throw new Error('apiKey is required: pass apiKey or set LIVEKIT_API_KEY');\n }\n\n const lkApiSecret =\n apiSecret || process.env.LIVEKIT_INFERENCE_API_SECRET || process.env.LIVEKIT_API_SECRET;\n if (!lkApiSecret) {\n throw new Error('apiSecret is required: pass apiSecret or set LIVEKIT_API_SECRET');\n }\n\n this.opts = {\n model,\n provider,\n baseURL: lkBaseURL,\n apiKey: lkApiKey,\n apiSecret: lkApiSecret,\n modelOptions: modelOptions || {},\n strictToolSchema,\n };\n\n this.client = new OpenAI({\n baseURL: this.opts.baseURL,\n apiKey: '', // leave a temporary empty string to avoid OpenAI complain about missing key\n });\n }\n\n label(): string {\n return 'inference.LLM';\n }\n\n get model(): string {\n return this.opts.model;\n }\n\n static fromModelString(modelString: string): LLM {\n return new LLM({ model: modelString });\n }\n\n chat({\n chatCtx,\n toolCtx,\n connOptions = DEFAULT_API_CONNECT_OPTIONS,\n parallelToolCalls,\n toolChoice,\n // TODO(AJS-270): Add response_format parameter support\n extraKwargs,\n }: {\n chatCtx: llm.ChatContext;\n toolCtx?: llm.ToolContext;\n connOptions?: APIConnectOptions;\n parallelToolCalls?: boolean;\n toolChoice?: llm.ToolChoice;\n // TODO(AJS-270): Add responseFormat parameter\n extraKwargs?: Record<string, unknown>;\n }): LLMStream {\n let modelOptions: Record<string, unknown> = { ...(extraKwargs || {}) };\n\n parallelToolCalls =\n parallelToolCalls !== undefined\n ? parallelToolCalls\n : this.opts.modelOptions.parallel_tool_calls;\n\n if (toolCtx && Object.keys(toolCtx).length > 0 && parallelToolCalls !== undefined) {\n modelOptions.parallel_tool_calls = parallelToolCalls;\n }\n\n toolChoice =\n toolChoice !== undefined\n ? toolChoice\n : (this.opts.modelOptions.tool_choice as llm.ToolChoice | undefined);\n\n if (toolChoice) {\n modelOptions.tool_choice = toolChoice as ToolChoice;\n }\n\n // TODO(AJS-270): Add response_format support here\n\n modelOptions = { ...modelOptions, ...this.opts.modelOptions };\n\n return new LLMStream(this, {\n model: this.opts.model,\n provider: this.opts.provider,\n client: this.client,\n chatCtx,\n toolCtx,\n connOptions,\n modelOptions,\n strictToolSchema: this.opts.strictToolSchema ?? 
false, // default to false if not set\n gatewayOptions: {\n apiKey: this.opts.apiKey,\n apiSecret: this.opts.apiSecret,\n },\n });\n }\n}\n\nexport class LLMStream extends llm.LLMStream {\n private model: LLMModels;\n private provider?: string;\n private providerFmt: llm.ProviderFormat;\n private client: OpenAI;\n private modelOptions: Record<string, unknown>;\n private strictToolSchema: boolean;\n\n private gatewayOptions?: GatewayOptions;\n private toolCallId?: string;\n private toolIndex?: number;\n private fncName?: string;\n private fncRawArguments?: string;\n private toolExtra?: Record<string, unknown>;\n\n constructor(\n llm: LLM,\n {\n model,\n provider,\n client,\n chatCtx,\n toolCtx,\n gatewayOptions,\n connOptions,\n modelOptions,\n providerFmt,\n strictToolSchema,\n }: {\n model: LLMModels;\n provider?: string;\n client: OpenAI;\n chatCtx: llm.ChatContext;\n toolCtx?: llm.ToolContext;\n gatewayOptions?: GatewayOptions;\n connOptions: APIConnectOptions;\n modelOptions: Record<string, unknown>;\n providerFmt?: llm.ProviderFormat;\n strictToolSchema: boolean;\n },\n ) {\n super(llm, { chatCtx, toolCtx, connOptions });\n this.client = client;\n this.gatewayOptions = gatewayOptions;\n this.provider = provider;\n this.providerFmt = providerFmt || 'openai';\n this.modelOptions = modelOptions;\n this.model = model;\n this.strictToolSchema = strictToolSchema;\n }\n\n protected async run(): Promise<void> {\n // current function call that we're waiting for full completion (args are streamed)\n // (defined inside the run method to make sure the state is reset for each run/attempt)\n let retryable = true;\n this.toolCallId = this.fncName = this.fncRawArguments = this.toolIndex = undefined;\n this.toolExtra = undefined;\n\n try {\n const messages = (await this.chatCtx.toProviderFormat(\n this.providerFmt,\n )) as OpenAI.ChatCompletionMessageParam[];\n\n const tools = this.toolCtx\n ? Object.entries(this.toolCtx).map(([name, func]) => {\n const oaiParams = {\n type: 'function' as const,\n function: {\n name,\n description: func.description,\n parameters: llm.toJsonSchema(\n func.parameters,\n true,\n this.strictToolSchema,\n ) as unknown as OpenAI.Chat.Completions.ChatCompletionFunctionTool['function']['parameters'],\n } as OpenAI.Chat.Completions.ChatCompletionFunctionTool['function'],\n };\n\n if (this.strictToolSchema) {\n oaiParams.function.strict = true;\n }\n\n return oaiParams;\n })\n : undefined;\n\n const requestOptions: Record<string, unknown> = { ...this.modelOptions };\n if (!tools) {\n delete requestOptions.tool_choice;\n }\n\n // Dynamically set the access token for the LiveKit Agent Gateway API\n if (this.gatewayOptions) {\n this.client.apiKey = await createAccessToken(\n this.gatewayOptions.apiKey,\n this.gatewayOptions.apiSecret,\n );\n }\n\n if (this.provider) {\n const extraHeaders = requestOptions.extra_headers\n ? 
(requestOptions.extra_headers as Record<string, string>)\n : {};\n extraHeaders['X-LiveKit-Inference-Provider'] = this.provider;\n requestOptions.extra_headers = extraHeaders;\n }\n\n const stream = await this.client.chat.completions.create(\n {\n model: this.model,\n messages,\n tools,\n stream: true,\n stream_options: { include_usage: true },\n ...requestOptions,\n },\n {\n timeout: this.connOptions.timeoutMs,\n },\n );\n\n for await (const chunk of stream) {\n for (const choice of chunk.choices) {\n if (this.abortController.signal.aborted) {\n break;\n }\n const chatChunk = this.parseChoice(chunk.id, choice);\n if (chatChunk) {\n retryable = false;\n this.queue.put(chatChunk);\n }\n }\n\n if (chunk.usage) {\n const usage = chunk.usage;\n retryable = false;\n this.queue.put({\n id: chunk.id,\n usage: {\n completionTokens: usage.completion_tokens,\n promptTokens: usage.prompt_tokens,\n promptCachedTokens: usage.prompt_tokens_details?.cached_tokens || 0,\n totalTokens: usage.total_tokens,\n },\n });\n }\n }\n } catch (error) {\n if (error instanceof OpenAI.APIConnectionTimeoutError) {\n throw new APITimeoutError({ options: { retryable } });\n } else if (error instanceof OpenAI.APIError) {\n throw new APIStatusError({\n message: error.message,\n options: {\n statusCode: error.status,\n body: error.error,\n requestId: error.requestID,\n retryable,\n },\n });\n } else {\n throw new APIConnectionError({\n message: toError(error).message,\n options: { retryable },\n });\n }\n }\n }\n\n private parseChoice(\n id: string,\n choice: OpenAI.ChatCompletionChunk.Choice,\n ): llm.ChatChunk | undefined {\n const delta = choice.delta;\n\n // https://github.com/livekit/agents/issues/688\n // the delta can be None when using Azure OpenAI (content filtering)\n if (delta === undefined) return undefined;\n\n if (delta.tool_calls) {\n // check if we have functions to calls\n for (const tool of delta.tool_calls) {\n if (!tool.function) {\n continue; // oai may add other tools in the future\n }\n\n /**\n * The way OpenAI streams tool calls is a bit tricky.\n *\n * For any new tool call, it first emits a delta tool call with id, and function name,\n * the rest of the delta chunks will only stream the remaining arguments string,\n * until a new tool call is started or the tool call is finished.\n * See below for an example.\n *\n * Choice(delta=ChoiceDelta(content=None, function_call=None, refusal=None, role='assistant', tool_calls=None), finish_reason=None, index=0, logprobs=None)\n * [ChoiceDeltaToolCall(index=0, id='call_LaVeHWUHpef9K1sd5UO8TtLg', function=ChoiceDeltaToolCallFunction(arguments='', name='get_weather'), type='function')]\n * [ChoiceDeltaToolCall(index=0, id=None, function=ChoiceDeltaToolCallFunction(arguments='\\{\"location\": \"P', name=None), type=None)]\n * [ChoiceDeltaToolCall(index=0, id=None, function=ChoiceDeltaToolCallFunction(arguments='aris\\}', name=None), type=None)]\n * [ChoiceDeltaToolCall(index=1, id='call_ThU4OmMdQXnnVmpXGOCknXIB', function=ChoiceDeltaToolCallFunction(arguments='', name='get_weather'), type='function')]\n * [ChoiceDeltaToolCall(index=1, id=None, function=ChoiceDeltaToolCallFunction(arguments='\\{\"location\": \"T', name=None), type=None)]\n * [ChoiceDeltaToolCall(index=1, id=None, function=ChoiceDeltaToolCallFunction(arguments='okyo', name=None), type=None)]\n * Choice(delta=ChoiceDelta(content=None, function_call=None, refusal=None, role=None, tool_calls=None), finish_reason='tool_calls', index=0, logprobs=None)\n */\n let callChunk: llm.ChatChunk | undefined;\n 
// If we have a previous tool call and this is a new one, emit the previous\n if (this.toolCallId && tool.id && tool.index !== this.toolIndex) {\n callChunk = this.createRunningToolCallChunk(id, delta);\n this.toolCallId = this.fncName = this.fncRawArguments = undefined;\n this.toolExtra = undefined;\n }\n\n // Start or continue building the current tool call\n if (tool.function.name) {\n this.toolIndex = tool.index;\n this.toolCallId = tool.id;\n this.fncName = tool.function.name;\n this.fncRawArguments = tool.function.arguments || '';\n // Extract extra from tool call (e.g., Google thought signatures)\n this.toolExtra =\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n ((tool as any).extra_content as Record<string, unknown> | undefined) ?? undefined;\n } else if (tool.function.arguments) {\n this.fncRawArguments = (this.fncRawArguments || '') + tool.function.arguments;\n }\n\n if (callChunk) {\n return callChunk;\n }\n }\n }\n\n // If we're done with tool calls, emit the final one\n if (\n choice.finish_reason &&\n ['tool_calls', 'stop'].includes(choice.finish_reason) &&\n this.toolCallId !== undefined\n ) {\n const callChunk = this.createRunningToolCallChunk(id, delta);\n this.toolCallId = this.fncName = this.fncRawArguments = undefined;\n this.toolExtra = undefined;\n return callChunk;\n }\n\n // Extract extra from delta (e.g., Google thought signatures on text parts)\n const deltaExtra =\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n ((delta as any).extra_content as Record<string, unknown> | undefined) ?? undefined;\n\n // Regular content message\n if (!delta.content && !deltaExtra) {\n return undefined;\n }\n\n return {\n id,\n delta: {\n role: 'assistant',\n content: delta.content || undefined,\n extra: deltaExtra,\n },\n };\n }\n\n private createRunningToolCallChunk(\n id: string,\n delta: OpenAI.Chat.Completions.ChatCompletionChunk.Choice.Delta,\n ): llm.ChatChunk {\n const toolExtra = this.toolExtra ? { ...this.toolExtra } : {};\n const thoughtSignature = this.extractThoughtSignature(toolExtra);\n const deltaExtra =\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n ((delta as any).extra_content as Record<string, unknown> | undefined) ?? 
undefined;\n\n return {\n id,\n delta: {\n role: 'assistant',\n content: delta.content || undefined,\n extra: deltaExtra,\n toolCalls: [\n llm.FunctionCall.create({\n callId: this.toolCallId || '',\n name: this.fncName || '',\n args: this.fncRawArguments || '',\n extra: toolExtra,\n thoughtSignature,\n }),\n ],\n },\n };\n }\n\n private extractThoughtSignature(extra?: Record<string, unknown>): string | undefined {\n const googleExtra = extra?.google;\n if (googleExtra && typeof googleExtra === 'object') {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n return (googleExtra as any).thoughtSignature || (googleExtra as any).thought_signature;\n }\n return undefined;\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAGA,oBAAmB;AACnB,eAOO;AACP,UAAqB;AAErB,mBAAkD;AAElD,MAAM,mBAAmB;AA4FlB,MAAM,YAAY,IAAI,IAAI;AAAA,EACvB;AAAA,EACA;AAAA,EAER,YAAY,MAQT;AACD,UAAM;AAEN,UAAM;AAAA,MACJ;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,mBAAmB;AAAA,IACrB,IAAI;AAEJ,UAAM,YAAY,WAAW,QAAQ,IAAI,yBAAyB;AAClE,UAAM,WAAW,UAAU,QAAQ,IAAI,6BAA6B,QAAQ,IAAI;AAChF,QAAI,CAAC,UAAU;AACb,YAAM,IAAI,MAAM,wDAAwD;AAAA,IAC1E;AAEA,UAAM,cACJ,aAAa,QAAQ,IAAI,gCAAgC,QAAQ,IAAI;AACvE,QAAI,CAAC,aAAa;AAChB,YAAM,IAAI,MAAM,iEAAiE;AAAA,IACnF;AAEA,SAAK,OAAO;AAAA,MACV;AAAA,MACA;AAAA,MACA,SAAS;AAAA,MACT,QAAQ;AAAA,MACR,WAAW;AAAA,MACX,cAAc,gBAAgB,CAAC;AAAA,MAC/B;AAAA,IACF;AAEA,SAAK,SAAS,IAAI,cAAAA,QAAO;AAAA,MACvB,SAAS,KAAK,KAAK;AAAA,MACnB,QAAQ;AAAA;AAAA,IACV,CAAC;AAAA,EACH;AAAA,EAEA,QAAgB;AACd,WAAO;AAAA,EACT;AAAA,EAEA,IAAI,QAAgB;AAClB,WAAO,KAAK,KAAK;AAAA,EACnB;AAAA,EAEA,OAAO,gBAAgB,aAA0B;AAC/C,WAAO,IAAI,IAAI,EAAE,OAAO,YAAY,CAAC;AAAA,EACvC;AAAA,EAEA,KAAK;AAAA,IACH;AAAA,IACA;AAAA,IACA,cAAc;AAAA,IACd;AAAA,IACA;AAAA;AAAA,IAEA;AAAA,EACF,GAQc;AACZ,QAAI,eAAwC,EAAE,GAAI,eAAe,CAAC,EAAG;AAErE,wBACE,sBAAsB,SAClB,oBACA,KAAK,KAAK,aAAa;AAE7B,QAAI,WAAW,OAAO,KAAK,OAAO,EAAE,SAAS,KAAK,sBAAsB,QAAW;AACjF,mBAAa,sBAAsB;AAAA,IACrC;AAEA,iBACE,eAAe,SACX,aACC,KAAK,KAAK,aAAa;AAE9B,QAAI,YAAY;AACd,mBAAa,cAAc;AAAA,IAC7B;AAIA,mBAAe,EAAE,GAAG,cAAc,GAAG,KAAK,KAAK,aAAa;AAE5D,WAAO,IAAI,UAAU,MAAM;AAAA,MACzB,OAAO,KAAK,KAAK;AAAA,MACjB,UAAU,KAAK,KAAK;AAAA,MACpB,QAAQ,KAAK;AAAA,MACb;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,kBAAkB,KAAK,KAAK,oBAAoB;AAAA;AAAA,MAChD,gBAAgB;AAAA,QACd,QAAQ,KAAK,KAAK;AAAA,QAClB,WAAW,KAAK,KAAK;AAAA,MACvB;AAAA,IACF,CAAC;AAAA,EACH;AACF;AAEO,MAAM,kBAAkB,IAAI,UAAU;AAAA,EACnC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAER,YACEC,MACA;AAAA,IACE;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAYA;AACA,UAAMA,MAAK,EAAE,SAAS,SAAS,YAAY,CAAC;AAC5C,SAAK,SAAS;AACd,SAAK,iBAAiB;AACtB,SAAK,WAAW;AAChB,SAAK,cAAc,eAAe;AAClC,SAAK,eAAe;AACpB,SAAK,QAAQ;AACb,SAAK,mBAAmB;AAAA,EAC1B;AAAA,EAEA,MAAgB,MAAqB;AA1RvC;AA6RI,QAAI,YAAY;AAChB,SAAK,aAAa,KAAK,UAAU,KAAK,kBAAkB,KAAK,YAAY;AACzE,SAAK,YAAY;AAEjB,QAAI;AACF,YAAM,WAAY,MAAM,KAAK,QAAQ;AAAA,QACnC,KAAK;AAAA,MACP;AAEA,YAAM,QAAQ,KAAK,UACf,OAAO,QAAQ,KAAK,OAAO,EAAE,IAAI,CAAC,CAAC,MAAM,IAAI,MAAM;AACjD,cAAM,YAAY;AAAA,UAChB,MAAM;AAAA,UACN,UAAU;AAAA,YACR;AAAA,YACA,aAAa,KAAK;AAAA,YAClB,YAAY,IAAI;AAAA,cACd,KAAK;AAAA,cACL;AAAA,cACA,KAAK;AAAA,YACP;AAAA,UACF;AAAA,QACF;AAEA,YAAI,KAAK,kBAAkB;AACzB,oBAAU,SAAS,SAAS;AAAA,QAC9B;AAEA,eAAO;AAAA,MACT,CAAC,IACD;AAEJ,YAAM,iBAA0C,EAAE,GAAG,KAAK,aAAa;AACvE,UAAI,CAAC,OAAO;AACV,eAAO,eAAe;AAAA,MACxB;AAGA,UAAI,KAAK,gBAAgB;AACvB,aAAK,OAAO,SAAS,UAAM;AAAA,UACzB,KAAK,eAAe;AAAA,UACpB,KAAK,eAAe;AAAA,QACtB;AAAA,MACF;AAEA,UAAI,KAAK,UAAU;AACjB,cAAM,eAAe
,eAAe,gBAC/B,eAAe,gBAChB,CAAC;AACL,qBAAa,8BAA8B,IAAI,KAAK;AACpD,uBAAe,gBAAgB;AAAA,MACjC;AAEA,YAAM,SAAS,MAAM,KAAK,OAAO,KAAK,YAAY;AAAA,QAChD;AAAA,UACE,OAAO,KAAK;AAAA,UACZ;AAAA,UACA;AAAA,UACA,QAAQ;AAAA,UACR,gBAAgB,EAAE,eAAe,KAAK;AAAA,UACtC,GAAG;AAAA,QACL;AAAA,QACA;AAAA,UACE,SAAS,KAAK,YAAY;AAAA,QAC5B;AAAA,MACF;AAEA,uBAAiB,SAAS,QAAQ;AAChC,mBAAW,UAAU,MAAM,SAAS;AAClC,cAAI,KAAK,gBAAgB,OAAO,SAAS;AACvC;AAAA,UACF;AACA,gBAAM,YAAY,KAAK,YAAY,MAAM,IAAI,MAAM;AACnD,cAAI,WAAW;AACb,wBAAY;AACZ,iBAAK,MAAM,IAAI,SAAS;AAAA,UAC1B;AAAA,QACF;AAEA,YAAI,MAAM,OAAO;AACf,gBAAM,QAAQ,MAAM;AACpB,sBAAY;AACZ,eAAK,MAAM,IAAI;AAAA,YACb,IAAI,MAAM;AAAA,YACV,OAAO;AAAA,cACL,kBAAkB,MAAM;AAAA,cACxB,cAAc,MAAM;AAAA,cACpB,sBAAoB,WAAM,0BAAN,mBAA6B,kBAAiB;AAAA,cAClE,aAAa,MAAM;AAAA,YACrB;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AAAA,IACF,SAAS,OAAO;AACd,UAAI,iBAAiB,cAAAD,QAAO,2BAA2B;AACrD,cAAM,IAAI,yBAAgB,EAAE,SAAS,EAAE,UAAU,EAAE,CAAC;AAAA,MACtD,WAAW,iBAAiB,cAAAA,QAAO,UAAU;AAC3C,cAAM,IAAI,wBAAe;AAAA,UACvB,SAAS,MAAM;AAAA,UACf,SAAS;AAAA,YACP,YAAY,MAAM;AAAA,YAClB,MAAM,MAAM;AAAA,YACZ,WAAW,MAAM;AAAA,YACjB;AAAA,UACF;AAAA,QACF,CAAC;AAAA,MACH,OAAO;AACL,cAAM,IAAI,4BAAmB;AAAA,UAC3B,aAAS,kBAAQ,KAAK,EAAE;AAAA,UACxB,SAAS,EAAE,UAAU;AAAA,QACvB,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AAAA,EAEQ,YACN,IACA,QAC2B;AAC3B,UAAM,QAAQ,OAAO;AAIrB,QAAI,UAAU,OAAW,QAAO;AAEhC,QAAI,MAAM,YAAY;AAEpB,iBAAW,QAAQ,MAAM,YAAY;AACnC,YAAI,CAAC,KAAK,UAAU;AAClB;AAAA,QACF;AAmBA,YAAI;AAEJ,YAAI,KAAK,cAAc,KAAK,MAAM,KAAK,UAAU,KAAK,WAAW;AAC/D,sBAAY,KAAK,2BAA2B,IAAI,KAAK;AACrD,eAAK,aAAa,KAAK,UAAU,KAAK,kBAAkB;AACxD,eAAK,YAAY;AAAA,QACnB;AAGA,YAAI,KAAK,SAAS,MAAM;AACtB,eAAK,YAAY,KAAK;AACtB,eAAK,aAAa,KAAK;AACvB,eAAK,UAAU,KAAK,SAAS;AAC7B,eAAK,kBAAkB,KAAK,SAAS,aAAa;AAElD,eAAK;AAAA,UAED,KAAa,iBAAyD;AAAA,QAC5E,WAAW,KAAK,SAAS,WAAW;AAClC,eAAK,mBAAmB,KAAK,mBAAmB,MAAM,KAAK,SAAS;AAAA,QACtE;AAEA,YAAI,WAAW;AACb,iBAAO;AAAA,QACT;AAAA,MACF;AAAA,IACF;AAGA,QACE,OAAO,iBACP,CAAC,cAAc,MAAM,EAAE,SAAS,OAAO,aAAa,KACpD,KAAK,eAAe,QACpB;AACA,YAAM,YAAY,KAAK,2BAA2B,IAAI,KAAK;AAC3D,WAAK,aAAa,KAAK,UAAU,KAAK,kBAAkB;AACxD,WAAK,YAAY;AACjB,aAAO;AAAA,IACT;AAGA,UAAM;AAAA;AAAA,MAEF,MAAc,iBAAyD;AAAA;AAG3E,QAAI,CAAC,MAAM,WAAW,CAAC,YAAY;AACjC,aAAO;AAAA,IACT;AAEA,WAAO;AAAA,MACL;AAAA,MACA,OAAO;AAAA,QACL,MAAM;AAAA,QACN,SAAS,MAAM,WAAW;AAAA,QAC1B,OAAO;AAAA,MACT;AAAA,IACF;AAAA,EACF;AAAA,EAEQ,2BACN,IACA,OACe;AACf,UAAM,YAAY,KAAK,YAAY,EAAE,GAAG,KAAK,UAAU,IAAI,CAAC;AAC5D,UAAM,mBAAmB,KAAK,wBAAwB,SAAS;AAC/D,UAAM;AAAA;AAAA,MAEF,MAAc,iBAAyD;AAAA;AAE3E,WAAO;AAAA,MACL;AAAA,MACA,OAAO;AAAA,QACL,MAAM;AAAA,QACN,SAAS,MAAM,WAAW;AAAA,QAC1B,OAAO;AAAA,QACP,WAAW;AAAA,UACT,IAAI,aAAa,OAAO;AAAA,YACtB,QAAQ,KAAK,cAAc;AAAA,YAC3B,MAAM,KAAK,WAAW;AAAA,YACtB,MAAM,KAAK,mBAAmB;AAAA,YAC9B,OAAO;AAAA,YACP;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA,EAEQ,wBAAwB,OAAqD;AACnF,UAAM,cAAc,+BAAO;AAC3B,QAAI,eAAe,OAAO,gBAAgB,UAAU;AAElD,aAAQ,YAAoB,oBAAqB,YAAoB;AAAA,IACvE;AACA,WAAO;AAAA,EACT;AACF;","names":["OpenAI","llm"]}
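For orientation, the rebuilt llm.js.map above embeds the updated src/inference/llm.ts: the inference LLM wraps an OpenAI client pointed at the LiveKit agent gateway and resolves credentials from constructor options or environment variables. A minimal usage sketch under those assumptions follows; the import path is illustrative, since only the source file itself is visible here.

// Illustrative import; the class is defined in src/inference/llm.ts.
import { LLM } from '@livekit/agents';

// apiKey falls back to LIVEKIT_INFERENCE_API_KEY, then LIVEKIT_API_KEY;
// apiSecret falls back to LIVEKIT_INFERENCE_API_SECRET, then
// LIVEKIT_API_SECRET. The constructor throws if neither source is set.
const gemini = new LLM({
  model: 'google/gemini-2.5-flash', // one of the newly added GoogleModels members
  modelOptions: { temperature: 0.7 },
});

// Shorthand from the source when only the model id matters:
const sameModel = LLM.fromModelString('google/gemini-2.5-flash');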
@@ -4,7 +4,7 @@ import * as llm from '../llm/index.js';
  import type { APIConnectOptions } from '../types.js';
  import { type AnyString } from './utils.js';
  export type OpenAIModels = 'openai/gpt-5' | 'openai/gpt-5-mini' | 'openai/gpt-5-nano' | 'openai/gpt-4.1' | 'openai/gpt-4.1-mini' | 'openai/gpt-4.1-nano' | 'openai/gpt-4o' | 'openai/gpt-4o-mini' | 'openai/gpt-oss-120b';
- export type GoogleModels = 'google/gemini-2.0-flash-lite';
+ export type GoogleModels = 'google/gemini-3-pro-preview' | 'google/gemini-3-flash-preview' | 'google/gemini-2.5-pro' | 'google/gemini-2.5-flash' | 'google/gemini-2.5-flash-lite' | 'google/gemini-2.0-flash' | 'google/gemini-2.0-flash-lite';
  export type QwenModels = 'qwen/qwen3-235b-a22b-instruct';
  export type KimiModels = 'moonshotai/kimi-k2-instruct';
  export type DeepSeekModels = 'deepseek-ai/deepseek-v3';
@@ -92,6 +92,7 @@ export declare class LLMStream extends llm.LLMStream {
  private toolIndex?;
  private fncName?;
  private fncRawArguments?;
+ private toolExtra?;
  constructor(llm: LLM, { model, provider, client, chatCtx, toolCtx, gatewayOptions, connOptions, modelOptions, providerFmt, strictToolSchema, }: {
  model: LLMModels;
  provider?: string;
@@ -107,6 +108,7 @@ export declare class LLMStream extends llm.LLMStream {
  protected run(): Promise<void>;
  private parseChoice;
  private createRunningToolCallChunk;
+ private extractThoughtSignature;
  }
  export {};
  //# sourceMappingURL=llm.d.ts.map
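The toolExtra field and extractThoughtSignature member added above carry provider metadata, notably Google thought signatures arriving on extra_content, from streamed tool-call deltas onto the emitted FunctionCall. A standalone sketch of the lookup the private helper performs, lifted from the source map above, with a hypothetical payload:

// Accepts the extra_content bag from a streamed delta and returns the
// Google thought signature, if present. Both casings are checked because
// the upstream field name may vary.
function extractThoughtSignature(extra?: Record<string, unknown>): string | undefined {
  const googleExtra = extra?.google;
  if (googleExtra && typeof googleExtra === 'object') {
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    return (googleExtra as any).thoughtSignature || (googleExtra as any).thought_signature;
  }
  return undefined;
}

// Hypothetical payload and result:
extractThoughtSignature({ google: { thoughtSignature: 'sig_abc123' } }); // => 'sig_abc123'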
@@ -4,7 +4,7 @@ import * as llm from '../llm/index.js';
  import type { APIConnectOptions } from '../types.js';
  import { type AnyString } from './utils.js';
  export type OpenAIModels = 'openai/gpt-5' | 'openai/gpt-5-mini' | 'openai/gpt-5-nano' | 'openai/gpt-4.1' | 'openai/gpt-4.1-mini' | 'openai/gpt-4.1-nano' | 'openai/gpt-4o' | 'openai/gpt-4o-mini' | 'openai/gpt-oss-120b';
- export type GoogleModels = 'google/gemini-2.0-flash-lite';
+ export type GoogleModels = 'google/gemini-3-pro-preview' | 'google/gemini-3-flash-preview' | 'google/gemini-2.5-pro' | 'google/gemini-2.5-flash' | 'google/gemini-2.5-flash-lite' | 'google/gemini-2.0-flash' | 'google/gemini-2.0-flash-lite';
  export type QwenModels = 'qwen/qwen3-235b-a22b-instruct';
  export type KimiModels = 'moonshotai/kimi-k2-instruct';
  export type DeepSeekModels = 'deepseek-ai/deepseek-v3';
@@ -92,6 +92,7 @@ export declare class LLMStream extends llm.LLMStream {
  private toolIndex?;
  private fncName?;
  private fncRawArguments?;
+ private toolExtra?;
  constructor(llm: LLM, { model, provider, client, chatCtx, toolCtx, gatewayOptions, connOptions, modelOptions, providerFmt, strictToolSchema, }: {
  model: LLMModels;
  provider?: string;
@@ -107,6 +108,7 @@ export declare class LLMStream extends llm.LLMStream {
  protected run(): Promise<void>;
  private parseChoice;
  private createRunningToolCallChunk;
+ private extractThoughtSignature;
  }
  export {};
  //# sourceMappingURL=llm.d.ts.map
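The same two members land in both the .d.cts and .d.ts declarations. Separately, the long comment inside parseChoice (visible in the source map further up) documents how OpenAI streams tool calls: the first delta for a call carries the id and function name, and subsequent deltas append raw argument fragments until a new call starts or finish_reason arrives. A reduced, state-free sketch of that accumulation, using names of my own choosing:

// Minimal model of the delta shape parseChoice consumes.
interface ToolCallDelta {
  index: number;
  id?: string;
  function?: { name?: string; arguments?: string };
}

// Folds a stream of deltas into completed tool calls: a delta carrying a
// function name opens a new call; argument-only deltas extend the last one.
function accumulateToolCalls(deltas: ToolCallDelta[]): { id: string; name: string; args: string }[] {
  const calls: { id: string; name: string; args: string }[] = [];
  for (const d of deltas) {
    if (d.function?.name) {
      calls.push({ id: d.id ?? '', name: d.function.name, args: d.function.arguments ?? '' });
    } else if (d.function?.arguments && calls.length > 0) {
      calls[calls.length - 1]!.args += d.function.arguments;
    }
  }
  return calls;
}

// Fed the example stream from that comment, this yields two get_weather
// calls whose args concatenate to '{"location": "Paris"}' and
// '{"location": "Tokyo..."}' (values illustrative).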