@livekit/agents 1.0.40 → 1.0.42

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (128)
  1. package/dist/cli.cjs +20 -18
  2. package/dist/cli.cjs.map +1 -1
  3. package/dist/cli.d.ts.map +1 -1
  4. package/dist/cli.js +20 -18
  5. package/dist/cli.js.map +1 -1
  6. package/dist/index.cjs +5 -0
  7. package/dist/index.cjs.map +1 -1
  8. package/dist/index.d.cts +1 -0
  9. package/dist/index.d.ts +1 -0
  10. package/dist/index.d.ts.map +1 -1
  11. package/dist/index.js +3 -0
  12. package/dist/index.js.map +1 -1
  13. package/dist/inference/stt.cjs +2 -1
  14. package/dist/inference/stt.cjs.map +1 -1
  15. package/dist/inference/stt.d.ts.map +1 -1
  16. package/dist/inference/stt.js +2 -1
  17. package/dist/inference/stt.js.map +1 -1
  18. package/dist/llm/realtime.cjs.map +1 -1
  19. package/dist/llm/realtime.d.cts +5 -1
  20. package/dist/llm/realtime.d.ts +5 -1
  21. package/dist/llm/realtime.d.ts.map +1 -1
  22. package/dist/llm/realtime.js.map +1 -1
  23. package/dist/tts/stream_adapter.cjs +15 -1
  24. package/dist/tts/stream_adapter.cjs.map +1 -1
  25. package/dist/tts/stream_adapter.d.ts.map +1 -1
  26. package/dist/tts/stream_adapter.js +15 -1
  27. package/dist/tts/stream_adapter.js.map +1 -1
  28. package/dist/tts/tts.cjs.map +1 -1
  29. package/dist/tts/tts.d.cts +9 -1
  30. package/dist/tts/tts.d.ts +9 -1
  31. package/dist/tts/tts.d.ts.map +1 -1
  32. package/dist/tts/tts.js.map +1 -1
  33. package/dist/types.cjs +3 -0
  34. package/dist/types.cjs.map +1 -1
  35. package/dist/types.d.cts +4 -0
  36. package/dist/types.d.ts +4 -0
  37. package/dist/types.d.ts.map +1 -1
  38. package/dist/types.js +2 -0
  39. package/dist/types.js.map +1 -1
  40. package/dist/voice/agent.cjs +11 -1
  41. package/dist/voice/agent.cjs.map +1 -1
  42. package/dist/voice/agent.d.cts +7 -3
  43. package/dist/voice/agent.d.ts +7 -3
  44. package/dist/voice/agent.d.ts.map +1 -1
  45. package/dist/voice/agent.js +11 -1
  46. package/dist/voice/agent.js.map +1 -1
  47. package/dist/voice/agent_activity.cjs +30 -14
  48. package/dist/voice/agent_activity.cjs.map +1 -1
  49. package/dist/voice/agent_activity.d.cts +1 -0
  50. package/dist/voice/agent_activity.d.ts +1 -0
  51. package/dist/voice/agent_activity.d.ts.map +1 -1
  52. package/dist/voice/agent_activity.js +30 -14
  53. package/dist/voice/agent_activity.js.map +1 -1
  54. package/dist/voice/agent_session.cjs +5 -1
  55. package/dist/voice/agent_session.cjs.map +1 -1
  56. package/dist/voice/agent_session.d.cts +2 -0
  57. package/dist/voice/agent_session.d.ts +2 -0
  58. package/dist/voice/agent_session.d.ts.map +1 -1
  59. package/dist/voice/agent_session.js +5 -1
  60. package/dist/voice/agent_session.js.map +1 -1
  61. package/dist/voice/audio_recognition.cjs +1 -1
  62. package/dist/voice/audio_recognition.cjs.map +1 -1
  63. package/dist/voice/audio_recognition.d.ts.map +1 -1
  64. package/dist/voice/audio_recognition.js +1 -1
  65. package/dist/voice/audio_recognition.js.map +1 -1
  66. package/dist/voice/background_audio.cjs +2 -1
  67. package/dist/voice/background_audio.cjs.map +1 -1
  68. package/dist/voice/background_audio.d.cts +4 -2
  69. package/dist/voice/background_audio.d.ts +4 -2
  70. package/dist/voice/background_audio.d.ts.map +1 -1
  71. package/dist/voice/background_audio.js +2 -1
  72. package/dist/voice/background_audio.js.map +1 -1
  73. package/dist/voice/generation.cjs +58 -5
  74. package/dist/voice/generation.cjs.map +1 -1
  75. package/dist/voice/generation.d.cts +17 -3
  76. package/dist/voice/generation.d.ts +17 -3
  77. package/dist/voice/generation.d.ts.map +1 -1
  78. package/dist/voice/generation.js +63 -6
  79. package/dist/voice/generation.js.map +1 -1
  80. package/dist/voice/index.cjs.map +1 -1
  81. package/dist/voice/index.d.cts +1 -1
  82. package/dist/voice/index.d.ts +1 -1
  83. package/dist/voice/index.d.ts.map +1 -1
  84. package/dist/voice/index.js.map +1 -1
  85. package/dist/voice/io.cjs +22 -2
  86. package/dist/voice/io.cjs.map +1 -1
  87. package/dist/voice/io.d.cts +21 -5
  88. package/dist/voice/io.d.ts +21 -5
  89. package/dist/voice/io.d.ts.map +1 -1
  90. package/dist/voice/io.js +18 -1
  91. package/dist/voice/io.js.map +1 -1
  92. package/dist/voice/room_io/_output.cjs +3 -2
  93. package/dist/voice/room_io/_output.cjs.map +1 -1
  94. package/dist/voice/room_io/_output.d.cts +3 -3
  95. package/dist/voice/room_io/_output.d.ts +3 -3
  96. package/dist/voice/room_io/_output.d.ts.map +1 -1
  97. package/dist/voice/room_io/_output.js +4 -3
  98. package/dist/voice/room_io/_output.js.map +1 -1
  99. package/dist/voice/transcription/synchronizer.cjs +137 -13
  100. package/dist/voice/transcription/synchronizer.cjs.map +1 -1
  101. package/dist/voice/transcription/synchronizer.d.cts +34 -4
  102. package/dist/voice/transcription/synchronizer.d.ts +34 -4
  103. package/dist/voice/transcription/synchronizer.d.ts.map +1 -1
  104. package/dist/voice/transcription/synchronizer.js +141 -14
  105. package/dist/voice/transcription/synchronizer.js.map +1 -1
  106. package/dist/voice/transcription/synchronizer.test.cjs +151 -0
  107. package/dist/voice/transcription/synchronizer.test.cjs.map +1 -0
  108. package/dist/voice/transcription/synchronizer.test.js +150 -0
  109. package/dist/voice/transcription/synchronizer.test.js.map +1 -0
  110. package/package.json +1 -1
  111. package/src/cli.ts +20 -18
  112. package/src/index.ts +1 -0
  113. package/src/inference/stt.ts +9 -8
  114. package/src/llm/realtime.ts +5 -1
  115. package/src/tts/stream_adapter.ts +23 -1
  116. package/src/tts/tts.ts +10 -1
  117. package/src/types.ts +5 -0
  118. package/src/voice/agent.ts +19 -4
  119. package/src/voice/agent_activity.ts +38 -13
  120. package/src/voice/agent_session.ts +6 -0
  121. package/src/voice/audio_recognition.ts +2 -1
  122. package/src/voice/background_audio.ts +6 -3
  123. package/src/voice/generation.ts +115 -10
  124. package/src/voice/index.ts +1 -1
  125. package/src/voice/io.ts +40 -5
  126. package/src/voice/room_io/_output.ts +6 -5
  127. package/src/voice/transcription/synchronizer.test.ts +206 -0
  128. package/src/voice/transcription/synchronizer.ts +202 -17
@@ -1 +1 @@
- {"version":3,"sources":["../../src/inference/stt.ts"],"sourcesContent":["// SPDX-FileCopyrightText: 2025 LiveKit, Inc.\n//\n// SPDX-License-Identifier: Apache-2.0\nimport { type AudioFrame } from '@livekit/rtc-node';\nimport type { WebSocket } from 'ws';\nimport { APIError, APIStatusError } from '../_exceptions.js';\nimport { AudioByteStream } from '../audio.js';\nimport { log } from '../log.js';\nimport { createStreamChannel } from '../stream/stream_channel.js';\nimport {\n STT as BaseSTT,\n SpeechStream as BaseSpeechStream,\n type SpeechData,\n type SpeechEvent,\n SpeechEventType,\n} from '../stt/index.js';\nimport { type APIConnectOptions, DEFAULT_API_CONNECT_OPTIONS } from '../types.js';\nimport { type AudioBuffer, Event, Task, cancelAndWait, shortuuid, waitForAbort } from '../utils.js';\nimport type { TimedString } from '../voice/io.js';\nimport {\n type SttServerEvent,\n type SttTranscriptEvent,\n sttServerEventSchema,\n} from './api_protos.js';\nimport { type AnyString, connectWs, createAccessToken } from './utils.js';\n\nexport type DeepgramModels =\n | 'deepgram/flux-general'\n | 'deepgram/nova-3'\n | 'deepgram/nova-3-medical'\n | 'deepgram/nova-2'\n | 'deepgram/nova-2-medical'\n | 'deepgram/nova-2-conversationalai'\n | 'deepgram/nova-2-phonecall';\n\nexport type CartesiaModels = 'cartesia/ink-whisper';\n\nexport type AssemblyaiModels =\n | 'assemblyai/universal-streaming'\n | 'assemblyai/universal-streaming-multilingual';\n\nexport type ElevenlabsSTTModels = 'elevenlabs/scribe_v2_realtime';\n\nexport interface CartesiaOptions {\n min_volume?: number; // default: not specified\n max_silence_duration_secs?: number; // default: not specified\n}\n\nexport interface DeepgramOptions {\n filler_words?: boolean; // default: true\n interim_results?: boolean; // default: true\n endpointing?: number; // default: 25 (ms)\n punctuate?: boolean; // default: false\n smart_format?: boolean;\n keywords?: Array<[string, number]>;\n keyterms?: string[];\n profanity_filter?: boolean;\n numerals?: boolean;\n mip_opt_out?: boolean;\n}\n\nexport interface AssemblyAIOptions {\n format_turns?: boolean; // default: false\n end_of_turn_confidence_threshold?: number; // default: 0.01\n min_end_of_turn_silence_when_confident?: number; // default: 0\n max_turn_silence?: number; // default: not specified\n keyterms_prompt?: string[]; // default: not specified\n}\n\nexport type STTLanguages =\n | 'multi'\n | 'en'\n | 'de'\n | 'es'\n | 'fr'\n | 'ja'\n | 'pt'\n | 'zh'\n | 'hi'\n | AnyString;\n\ntype _STTModels = DeepgramModels | CartesiaModels | AssemblyaiModels | ElevenlabsSTTModels;\n\nexport type STTModels = _STTModels | 'auto' | AnyString;\n\nexport type ModelWithLanguage = `${_STTModels}:${STTLanguages}` | STTModels;\n\nexport type STTOptions<TModel extends STTModels> = TModel extends DeepgramModels\n ? DeepgramOptions\n : TModel extends CartesiaModels\n ? CartesiaOptions\n : TModel extends AssemblyaiModels\n ? 
AssemblyAIOptions\n : Record<string, unknown>;\n\nexport type STTEncoding = 'pcm_s16le';\n\nconst DEFAULT_ENCODING: STTEncoding = 'pcm_s16le';\nconst DEFAULT_SAMPLE_RATE = 16000;\nconst DEFAULT_BASE_URL = 'wss://agent-gateway.livekit.cloud/v1';\nconst DEFAULT_CANCEL_TIMEOUT = 5000;\n\nexport interface InferenceSTTOptions<TModel extends STTModels> {\n model?: TModel;\n language?: STTLanguages;\n encoding: STTEncoding;\n sampleRate: number;\n baseURL: string;\n apiKey: string;\n apiSecret: string;\n modelOptions: STTOptions<TModel>;\n}\n\n/**\n * Livekit Cloud Inference STT\n */\nexport class STT<TModel extends STTModels> extends BaseSTT {\n private opts: InferenceSTTOptions<TModel>;\n private streams: Set<SpeechStream<TModel>> = new Set();\n\n #logger = log();\n\n constructor(opts?: {\n model?: TModel;\n language?: STTLanguages;\n baseURL?: string;\n encoding?: STTEncoding;\n sampleRate?: number;\n apiKey?: string;\n apiSecret?: string;\n modelOptions?: STTOptions<TModel>;\n }) {\n super({ streaming: true, interimResults: true, alignedTranscript: 'word' });\n\n const {\n model,\n language,\n baseURL,\n encoding = DEFAULT_ENCODING,\n sampleRate = DEFAULT_SAMPLE_RATE,\n apiKey,\n apiSecret,\n modelOptions = {} as STTOptions<TModel>,\n } = opts || {};\n\n const lkBaseURL = baseURL || process.env.LIVEKIT_INFERENCE_URL || DEFAULT_BASE_URL;\n const lkApiKey = apiKey || process.env.LIVEKIT_INFERENCE_API_KEY || process.env.LIVEKIT_API_KEY;\n if (!lkApiKey) {\n throw new Error('apiKey is required: pass apiKey or set LIVEKIT_API_KEY');\n }\n\n const lkApiSecret =\n apiSecret || process.env.LIVEKIT_INFERENCE_API_SECRET || process.env.LIVEKIT_API_SECRET;\n if (!lkApiSecret) {\n throw new Error('apiSecret is required: pass apiSecret or set LIVEKIT_API_SECRET');\n }\n\n this.opts = {\n model,\n language,\n encoding,\n sampleRate,\n baseURL: lkBaseURL,\n apiKey: lkApiKey,\n apiSecret: lkApiSecret,\n modelOptions,\n };\n }\n\n get label(): string {\n return 'inference.STT';\n }\n\n static fromModelString(modelString: string): STT<AnyString> {\n if (modelString.includes(':')) {\n const [model, language] = modelString.split(':') as [AnyString, STTLanguages];\n return new STT({ model, language });\n }\n return new STT({ model: modelString });\n }\n\n protected async _recognize(_: AudioBuffer): Promise<SpeechEvent> {\n throw new Error('LiveKit STT does not support batch recognition, use stream() instead');\n }\n\n updateOptions(opts: Partial<Pick<InferenceSTTOptions<TModel>, 'model' | 'language'>>): void {\n this.opts = { ...this.opts, ...opts };\n\n for (const stream of this.streams) {\n stream.updateOptions(opts);\n }\n }\n\n stream(options?: {\n language?: STTLanguages | string;\n connOptions?: APIConnectOptions;\n }): SpeechStream<TModel> {\n const { language, connOptions = DEFAULT_API_CONNECT_OPTIONS } = options || {};\n const streamOpts = {\n ...this.opts,\n language: language ?? 
this.opts.language,\n } as InferenceSTTOptions<TModel>;\n\n const stream = new SpeechStream(this, streamOpts, connOptions);\n this.streams.add(stream);\n\n return stream;\n }\n\n async connectWs(timeout: number): Promise<WebSocket> {\n const params = {\n settings: {\n sample_rate: String(this.opts.sampleRate),\n encoding: this.opts.encoding,\n extra: this.opts.modelOptions,\n },\n } as Record<string, unknown>;\n\n if (this.opts.model && this.opts.model !== 'auto') {\n params.model = this.opts.model;\n }\n\n if (this.opts.language) {\n (params.settings as Record<string, unknown>).language = this.opts.language;\n }\n\n let baseURL = this.opts.baseURL;\n if (baseURL.startsWith('http://') || baseURL.startsWith('https://')) {\n baseURL = baseURL.replace('http', 'ws');\n }\n\n const token = await createAccessToken(this.opts.apiKey, this.opts.apiSecret);\n const url = `${baseURL}/stt`;\n const headers = { Authorization: `Bearer ${token}` } as Record<string, string>;\n\n const socket = await connectWs(url, headers, timeout);\n const msg = { ...params, type: 'session.create' };\n socket.send(JSON.stringify(msg));\n\n return socket;\n }\n}\n\nexport class SpeechStream<TModel extends STTModels> extends BaseSpeechStream {\n private opts: InferenceSTTOptions<TModel>;\n private requestId = shortuuid('stt_request_');\n private speaking = false;\n private speechDuration = 0;\n private reconnectEvent = new Event();\n private stt: STT<TModel>;\n private connOptions: APIConnectOptions;\n\n #logger = log();\n\n constructor(\n sttImpl: STT<TModel>,\n opts: InferenceSTTOptions<TModel>,\n connOptions: APIConnectOptions,\n ) {\n super(sttImpl, opts.sampleRate, connOptions);\n this.opts = opts;\n this.stt = sttImpl;\n this.connOptions = connOptions;\n }\n\n get label(): string {\n return 'inference.SpeechStream';\n }\n\n updateOptions(opts: Partial<Pick<InferenceSTTOptions<TModel>, 'model' | 'language'>>): void {\n this.opts = { ...this.opts, ...opts };\n this.reconnectEvent.set();\n }\n\n protected async run(): Promise<void> {\n while (true) {\n // Create fresh resources for each connection attempt\n let ws: WebSocket | null = null;\n let closing = false;\n let finalReceived = false;\n\n const eventChannel = createStreamChannel<SttServerEvent>();\n\n const resourceCleanup = () => {\n if (closing) return;\n closing = true;\n eventChannel.close();\n ws?.removeAllListeners();\n ws?.close();\n };\n\n const createWsListener = async (ws: WebSocket, signal: AbortSignal) => {\n return new Promise<void>((resolve, reject) => {\n const onAbort = () => {\n resourceCleanup();\n reject(new Error('WebSocket connection aborted'));\n };\n\n signal.addEventListener('abort', onAbort, { once: true });\n\n ws.on('message', (data) => {\n const json = JSON.parse(data.toString()) as SttServerEvent;\n eventChannel.write(json);\n });\n\n ws.on('error', (e) => {\n this.#logger.error({ error: e }, 'WebSocket error');\n resourceCleanup();\n reject(e);\n });\n\n ws.on('close', (code: number) => {\n resourceCleanup();\n\n if (!closing) return this.#logger.error('WebSocket closed unexpectedly');\n if (finalReceived) return resolve();\n\n reject(\n new APIStatusError({\n message: 'LiveKit STT connection closed unexpectedly',\n options: { statusCode: code },\n }),\n );\n });\n });\n };\n\n const send = async (socket: WebSocket, signal: AbortSignal) => {\n const audioStream = new AudioByteStream(\n this.opts.sampleRate,\n 1,\n Math.floor(this.opts.sampleRate / 20), // 50ms\n );\n\n // Create abort promise once to avoid memory leak\n const 
abortPromise = new Promise<never>((_, reject) => {\n if (signal.aborted) {\n return reject(new Error('Send aborted'));\n }\n const onAbort = () => reject(new Error('Send aborted'));\n signal.addEventListener('abort', onAbort, { once: true });\n });\n\n // Manual iteration to support cancellation\n const iterator = this.input[Symbol.asyncIterator]();\n try {\n while (true) {\n const result = await Promise.race([iterator.next(), abortPromise]);\n\n if (result.done) break;\n const ev = result.value;\n\n let frames: AudioFrame[];\n if (ev === SpeechStream.FLUSH_SENTINEL) {\n frames = audioStream.flush();\n } else {\n const frame = ev as AudioFrame;\n frames = audioStream.write(new Int16Array(frame.data).buffer);\n }\n\n for (const frame of frames) {\n this.speechDuration += frame.samplesPerChannel / frame.sampleRate;\n const base64 = Buffer.from(frame.data.buffer).toString('base64');\n const msg = { type: 'input_audio', audio: base64 };\n socket.send(JSON.stringify(msg));\n }\n }\n\n closing = true;\n socket.send(JSON.stringify({ type: 'session.finalize' }));\n } catch (e) {\n if ((e as Error).message === 'Send aborted') {\n // Expected abort, don't log\n return;\n }\n throw e;\n }\n };\n\n const recv = async (signal: AbortSignal) => {\n const serverEventStream = eventChannel.stream();\n const reader = serverEventStream.getReader();\n\n try {\n while (!this.closed && !signal.aborted) {\n const result = await reader.read();\n if (signal.aborted) return;\n if (result.done) return;\n\n // Parse and validate with Zod schema\n const parseResult = await sttServerEventSchema.safeParseAsync(result.value);\n if (!parseResult.success) {\n this.#logger.warn(\n { error: parseResult.error, rawData: result.value },\n 'Failed to parse STT server event',\n );\n continue;\n }\n\n const event: SttServerEvent = parseResult.data;\n\n switch (event.type) {\n case 'session.created':\n case 'session.finalized':\n break;\n case 'session.closed':\n finalReceived = true;\n resourceCleanup();\n break;\n case 'interim_transcript':\n this.processTranscript(event, false);\n break;\n case 'final_transcript':\n this.processTranscript(event, true);\n break;\n case 'error':\n this.#logger.error({ error: event }, 'Received error from LiveKit STT');\n resourceCleanup();\n throw new APIError(`LiveKit STT returned error: ${JSON.stringify(event)}`);\n }\n }\n } finally {\n reader.releaseLock();\n try {\n await serverEventStream.cancel();\n } catch (e) {\n this.#logger.debug('Error cancelling serverEventStream (may already be cancelled):', e);\n }\n }\n };\n\n try {\n ws = await this.stt.connectWs(this.connOptions.timeoutMs);\n\n const controller = this.abortController; // Use base class abortController for proper cancellation\n const sendTask = Task.from(({ signal }) => send(ws!, signal), controller);\n const wsListenerTask = Task.from(({ signal }) => createWsListener(ws!, signal), controller);\n const recvTask = Task.from(({ signal }) => recv(signal), controller);\n const waitReconnectTask = Task.from(\n ({ signal }) => Promise.race([this.reconnectEvent.wait(), waitForAbort(signal)]),\n controller,\n );\n\n try {\n await Promise.race([\n Promise.all([sendTask.result, wsListenerTask.result, recvTask.result]),\n waitReconnectTask.result,\n ]);\n\n // If reconnect didn't trigger, tasks finished - exit loop\n if (!waitReconnectTask.done) break;\n\n // Reconnect triggered - clear event and continue loop\n this.reconnectEvent.clear();\n } finally {\n // Cancel all tasks to ensure cleanup\n await cancelAndWait(\n [sendTask, 
wsListenerTask, recvTask, waitReconnectTask],\n DEFAULT_CANCEL_TIMEOUT,\n );\n resourceCleanup();\n }\n } finally {\n // Ensure cleanup even if connectWs throws\n resourceCleanup();\n }\n }\n }\n\n private processTranscript(data: SttTranscriptEvent, isFinal: boolean) {\n // Check if queue is closed to avoid race condition during disconnect\n if (this.queue.closed) return;\n\n const requestId = data.session_id || this.requestId;\n const text = data.transcript;\n const language = data.language || this.opts.language || 'en';\n\n if (!text && !isFinal) return;\n\n try {\n // We'll have a more accurate way of detecting when speech started when we have VAD\n if (!this.speaking) {\n this.speaking = true;\n this.queue.put({ type: SpeechEventType.START_OF_SPEECH });\n }\n\n const speechData: SpeechData = {\n language,\n startTime: this.startTimeOffset + data.start,\n endTime: this.startTimeOffset + data.start + data.duration,\n confidence: data.confidence,\n text,\n words: data.words.map(\n (word): TimedString => ({\n text: word.word,\n startTime: word.start + this.startTimeOffset,\n endTime: word.end + this.startTimeOffset,\n startTimeOffset: this.startTimeOffset,\n confidence: word.confidence,\n }),\n ),\n };\n\n if (isFinal) {\n if (this.speechDuration > 0) {\n this.queue.put({\n type: SpeechEventType.RECOGNITION_USAGE,\n requestId,\n recognitionUsage: { audioDuration: this.speechDuration },\n });\n this.speechDuration = 0;\n }\n\n this.queue.put({\n type: SpeechEventType.FINAL_TRANSCRIPT,\n requestId,\n alternatives: [speechData],\n });\n\n if (this.speaking) {\n this.speaking = false;\n this.queue.put({ type: SpeechEventType.END_OF_SPEECH });\n }\n } else {\n this.queue.put({\n type: SpeechEventType.INTERIM_TRANSCRIPT,\n requestId,\n alternatives: [speechData],\n });\n }\n } catch (e) {\n if (e instanceof Error && e.message.includes('Queue is closed')) {\n // Expected behavior on disconnect, log as warning\n this.#logger.warn(\n { err: e },\n 'Queue closed during transcript processing (expected during disconnect)',\n );\n } else {\n this.#logger.error({ err: e }, 'Error putting transcript to queue');\n }\n }\n 
}\n}\n"],"mappings":"AAGA,eAAgC;AAEhC,SAAS,UAAU,sBAAsB;AACzC,SAAS,uBAAuB;AAChC,SAAS,WAAW;AACpB,SAAS,2BAA2B;AACpC;AAAA,EACE,OAAO;AAAA,EACP,gBAAgB;AAAA,EAGhB;AAAA,OACK;AACP,SAAiC,mCAAmC;AACpE,SAA2B,OAAO,MAAM,eAAe,WAAW,oBAAoB;AAEtF;AAAA,EAGE;AAAA,OACK;AACP,SAAyB,WAAW,yBAAyB;AAyE7D,MAAM,mBAAgC;AACtC,MAAM,sBAAsB;AAC5B,MAAM,mBAAmB;AACzB,MAAM,yBAAyB;AAgBxB,MAAM,YAAsC,QAAQ;AAAA,EACjD;AAAA,EACA,UAAqC,oBAAI,IAAI;AAAA,EAErD,UAAU,IAAI;AAAA,EAEd,YAAY,MAST;AACD,UAAM,EAAE,WAAW,MAAM,gBAAgB,MAAM,mBAAmB,OAAO,CAAC;AAE1E,UAAM;AAAA,MACJ;AAAA,MACA;AAAA,MACA;AAAA,MACA,WAAW;AAAA,MACX,aAAa;AAAA,MACb;AAAA,MACA;AAAA,MACA,eAAe,CAAC;AAAA,IAClB,IAAI,QAAQ,CAAC;AAEb,UAAM,YAAY,WAAW,QAAQ,IAAI,yBAAyB;AAClE,UAAM,WAAW,UAAU,QAAQ,IAAI,6BAA6B,QAAQ,IAAI;AAChF,QAAI,CAAC,UAAU;AACb,YAAM,IAAI,MAAM,wDAAwD;AAAA,IAC1E;AAEA,UAAM,cACJ,aAAa,QAAQ,IAAI,gCAAgC,QAAQ,IAAI;AACvE,QAAI,CAAC,aAAa;AAChB,YAAM,IAAI,MAAM,iEAAiE;AAAA,IACnF;AAEA,SAAK,OAAO;AAAA,MACV;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,SAAS;AAAA,MACT,QAAQ;AAAA,MACR,WAAW;AAAA,MACX;AAAA,IACF;AAAA,EACF;AAAA,EAEA,IAAI,QAAgB;AAClB,WAAO;AAAA,EACT;AAAA,EAEA,OAAO,gBAAgB,aAAqC;AAC1D,QAAI,YAAY,SAAS,GAAG,GAAG;AAC7B,YAAM,CAAC,OAAO,QAAQ,IAAI,YAAY,MAAM,GAAG;AAC/C,aAAO,IAAI,IAAI,EAAE,OAAO,SAAS,CAAC;AAAA,IACpC;AACA,WAAO,IAAI,IAAI,EAAE,OAAO,YAAY,CAAC;AAAA,EACvC;AAAA,EAEA,MAAgB,WAAW,GAAsC;AAC/D,UAAM,IAAI,MAAM,sEAAsE;AAAA,EACxF;AAAA,EAEA,cAAc,MAA8E;AAC1F,SAAK,OAAO,EAAE,GAAG,KAAK,MAAM,GAAG,KAAK;AAEpC,eAAW,UAAU,KAAK,SAAS;AACjC,aAAO,cAAc,IAAI;AAAA,IAC3B;AAAA,EACF;AAAA,EAEA,OAAO,SAGkB;AACvB,UAAM,EAAE,UAAU,cAAc,4BAA4B,IAAI,WAAW,CAAC;AAC5E,UAAM,aAAa;AAAA,MACjB,GAAG,KAAK;AAAA,MACR,UAAU,YAAY,KAAK,KAAK;AAAA,IAClC;AAEA,UAAM,SAAS,IAAI,aAAa,MAAM,YAAY,WAAW;AAC7D,SAAK,QAAQ,IAAI,MAAM;AAEvB,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,UAAU,SAAqC;AACnD,UAAM,SAAS;AAAA,MACb,UAAU;AAAA,QACR,aAAa,OAAO,KAAK,KAAK,UAAU;AAAA,QACxC,UAAU,KAAK,KAAK;AAAA,QACpB,OAAO,KAAK,KAAK;AAAA,MACnB;AAAA,IACF;AAEA,QAAI,KAAK,KAAK,SAAS,KAAK,KAAK,UAAU,QAAQ;AACjD,aAAO,QAAQ,KAAK,KAAK;AAAA,IAC3B;AAEA,QAAI,KAAK,KAAK,UAAU;AACtB,MAAC,OAAO,SAAqC,WAAW,KAAK,KAAK;AAAA,IACpE;AAEA,QAAI,UAAU,KAAK,KAAK;AACxB,QAAI,QAAQ,WAAW,SAAS,KAAK,QAAQ,WAAW,UAAU,GAAG;AACnE,gBAAU,QAAQ,QAAQ,QAAQ,IAAI;AAAA,IACxC;AAEA,UAAM,QAAQ,MAAM,kBAAkB,KAAK,KAAK,QAAQ,KAAK,KAAK,SAAS;AAC3E,UAAM,MAAM,GAAG,OAAO;AACtB,UAAM,UAAU,EAAE,eAAe,UAAU,KAAK,GAAG;AAEnD,UAAM,SAAS,MAAM,UAAU,KAAK,SAAS,OAAO;AACpD,UAAM,MAAM,EAAE,GAAG,QAAQ,MAAM,iBAAiB;AAChD,WAAO,KAAK,KAAK,UAAU,GAAG,CAAC;AAE/B,WAAO;AAAA,EACT;AACF;AAEO,MAAM,qBAA+C,iBAAiB;AAAA,EACnE;AAAA,EACA,YAAY,UAAU,cAAc;AAAA,EACpC,WAAW;AAAA,EACX,iBAAiB;AAAA,EACjB,iBAAiB,IAAI,MAAM;AAAA,EAC3B;AAAA,EACA;AAAA,EAER,UAAU,IAAI;AAAA,EAEd,YACE,SACA,MACA,aACA;AACA,UAAM,SAAS,KAAK,YAAY,WAAW;AAC3C,SAAK,OAAO;AACZ,SAAK,MAAM;AACX,SAAK,cAAc;AAAA,EACrB;AAAA,EAEA,IAAI,QAAgB;AAClB,WAAO;AAAA,EACT;AAAA,EAEA,cAAc,MAA8E;AAC1F,SAAK,OAAO,EAAE,GAAG,KAAK,MAAM,GAAG,KAAK;AACpC,SAAK,eAAe,IAAI;AAAA,EAC1B;AAAA,EAEA,MAAgB,MAAqB;AACnC,WAAO,MAAM;AAEX,UAAI,KAAuB;AAC3B,UAAI,UAAU;AACd,UAAI,gBAAgB;AAEpB,YAAM,eAAe,oBAAoC;AAEzD,YAAM,kBAAkB,MAAM;AAC5B,YAAI,QAAS;AACb,kBAAU;AACV,qBAAa,MAAM;AACnB,iCAAI;AACJ,iCAAI;AAAA,MACN;AAEA,YAAM,mBAAmB,OAAOA,KAAe,WAAwB;AACrE,eAAO,IAAI,QAAc,CAAC,SAAS,WAAW;AAC5C,gBAAM,UAAU,MAAM;AACpB,4BAAgB;AAChB,mBAAO,IAAI,MAAM,8BAA8B,CAAC;AAAA,UAClD;AAEA,iBAAO,iBAAiB,SAAS,SAAS,EAAE,MAAM,KAAK,CAAC;AAExD,UAAAA,IAAG,GAAG,WAAW,CAAC,SAAS;AACzB,kBAAM,OAAO,KAAK,MAAM,KAAK,SAAS,CAAC;AACvC,yBAAa,MAAM,IAAI;AAAA,UACzB,CAAC;AAED,UAAAA,IAAG,GAAG,SAAS,CAAC,MAAM;AACpB,iBAAK,QAAQ,MAAM,EAAE,OAAO,EAAE,GAAG,iBAAiB;AAClD,4BAAgB;AAChB,mBAAO,CAAC;AAAA,UACV,CAAC;AAED,UAAAA,IAAG,GAAG,SAAS,CAAC,SAAiB;AAC/B,4BAAgB;AAEhB,gBAAI,CAAC,Q
AAS,QAAO,KAAK,QAAQ,MAAM,+BAA+B;AACvE,gBAAI,cAAe,QAAO,QAAQ;AAElC;AAAA,cACE,IAAI,eAAe;AAAA,gBACjB,SAAS;AAAA,gBACT,SAAS,EAAE,YAAY,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAAA,UACF,CAAC;AAAA,QACH,CAAC;AAAA,MACH;AAEA,YAAM,OAAO,OAAO,QAAmB,WAAwB;AAC7D,cAAM,cAAc,IAAI;AAAA,UACtB,KAAK,KAAK;AAAA,UACV;AAAA,UACA,KAAK,MAAM,KAAK,KAAK,aAAa,EAAE;AAAA;AAAA,QACtC;AAGA,cAAM,eAAe,IAAI,QAAe,CAAC,GAAG,WAAW;AACrD,cAAI,OAAO,SAAS;AAClB,mBAAO,OAAO,IAAI,MAAM,cAAc,CAAC;AAAA,UACzC;AACA,gBAAM,UAAU,MAAM,OAAO,IAAI,MAAM,cAAc,CAAC;AACtD,iBAAO,iBAAiB,SAAS,SAAS,EAAE,MAAM,KAAK,CAAC;AAAA,QAC1D,CAAC;AAGD,cAAM,WAAW,KAAK,MAAM,OAAO,aAAa,EAAE;AAClD,YAAI;AACF,iBAAO,MAAM;AACX,kBAAM,SAAS,MAAM,QAAQ,KAAK,CAAC,SAAS,KAAK,GAAG,YAAY,CAAC;AAEjE,gBAAI,OAAO,KAAM;AACjB,kBAAM,KAAK,OAAO;AAElB,gBAAI;AACJ,gBAAI,OAAO,aAAa,gBAAgB;AACtC,uBAAS,YAAY,MAAM;AAAA,YAC7B,OAAO;AACL,oBAAM,QAAQ;AACd,uBAAS,YAAY,MAAM,IAAI,WAAW,MAAM,IAAI,EAAE,MAAM;AAAA,YAC9D;AAEA,uBAAW,SAAS,QAAQ;AAC1B,mBAAK,kBAAkB,MAAM,oBAAoB,MAAM;AACvD,oBAAM,SAAS,OAAO,KAAK,MAAM,KAAK,MAAM,EAAE,SAAS,QAAQ;AAC/D,oBAAM,MAAM,EAAE,MAAM,eAAe,OAAO,OAAO;AACjD,qBAAO,KAAK,KAAK,UAAU,GAAG,CAAC;AAAA,YACjC;AAAA,UACF;AAEA,oBAAU;AACV,iBAAO,KAAK,KAAK,UAAU,EAAE,MAAM,mBAAmB,CAAC,CAAC;AAAA,QAC1D,SAAS,GAAG;AACV,cAAK,EAAY,YAAY,gBAAgB;AAE3C;AAAA,UACF;AACA,gBAAM;AAAA,QACR;AAAA,MACF;AAEA,YAAM,OAAO,OAAO,WAAwB;AAC1C,cAAM,oBAAoB,aAAa,OAAO;AAC9C,cAAM,SAAS,kBAAkB,UAAU;AAE3C,YAAI;AACF,iBAAO,CAAC,KAAK,UAAU,CAAC,OAAO,SAAS;AACtC,kBAAM,SAAS,MAAM,OAAO,KAAK;AACjC,gBAAI,OAAO,QAAS;AACpB,gBAAI,OAAO,KAAM;AAGjB,kBAAM,cAAc,MAAM,qBAAqB,eAAe,OAAO,KAAK;AAC1E,gBAAI,CAAC,YAAY,SAAS;AACxB,mBAAK,QAAQ;AAAA,gBACX,EAAE,OAAO,YAAY,OAAO,SAAS,OAAO,MAAM;AAAA,gBAClD;AAAA,cACF;AACA;AAAA,YACF;AAEA,kBAAM,QAAwB,YAAY;AAE1C,oBAAQ,MAAM,MAAM;AAAA,cAClB,KAAK;AAAA,cACL,KAAK;AACH;AAAA,cACF,KAAK;AACH,gCAAgB;AAChB,gCAAgB;AAChB;AAAA,cACF,KAAK;AACH,qBAAK,kBAAkB,OAAO,KAAK;AACnC;AAAA,cACF,KAAK;AACH,qBAAK,kBAAkB,OAAO,IAAI;AAClC;AAAA,cACF,KAAK;AACH,qBAAK,QAAQ,MAAM,EAAE,OAAO,MAAM,GAAG,iCAAiC;AACtE,gCAAgB;AAChB,sBAAM,IAAI,SAAS,+BAA+B,KAAK,UAAU,KAAK,CAAC,EAAE;AAAA,YAC7E;AAAA,UACF;AAAA,QACF,UAAE;AACA,iBAAO,YAAY;AACnB,cAAI;AACF,kBAAM,kBAAkB,OAAO;AAAA,UACjC,SAAS,GAAG;AACV,iBAAK,QAAQ,MAAM,kEAAkE,CAAC;AAAA,UACxF;AAAA,QACF;AAAA,MACF;AAEA,UAAI;AACF,aAAK,MAAM,KAAK,IAAI,UAAU,KAAK,YAAY,SAAS;AAExD,cAAM,aAAa,KAAK;AACxB,cAAM,WAAW,KAAK,KAAK,CAAC,EAAE,OAAO,MAAM,KAAK,IAAK,MAAM,GAAG,UAAU;AACxE,cAAM,iBAAiB,KAAK,KAAK,CAAC,EAAE,OAAO,MAAM,iBAAiB,IAAK,MAAM,GAAG,UAAU;AAC1F,cAAM,WAAW,KAAK,KAAK,CAAC,EAAE,OAAO,MAAM,KAAK,MAAM,GAAG,UAAU;AACnE,cAAM,oBAAoB,KAAK;AAAA,UAC7B,CAAC,EAAE,OAAO,MAAM,QAAQ,KAAK,CAAC,KAAK,eAAe,KAAK,GAAG,aAAa,MAAM,CAAC,CAAC;AAAA,UAC/E;AAAA,QACF;AAEA,YAAI;AACF,gBAAM,QAAQ,KAAK;AAAA,YACjB,QAAQ,IAAI,CAAC,SAAS,QAAQ,eAAe,QAAQ,SAAS,MAAM,CAAC;AAAA,YACrE,kBAAkB;AAAA,UACpB,CAAC;AAGD,cAAI,CAAC,kBAAkB,KAAM;AAG7B,eAAK,eAAe,MAAM;AAAA,QAC5B,UAAE;AAEA,gBAAM;AAAA,YACJ,CAAC,UAAU,gBAAgB,UAAU,iBAAiB;AAAA,YACtD;AAAA,UACF;AACA,0BAAgB;AAAA,QAClB;AAAA,MACF,UAAE;AAEA,wBAAgB;AAAA,MAClB;AAAA,IACF;AAAA,EACF;AAAA,EAEQ,kBAAkB,MAA0B,SAAkB;AAEpE,QAAI,KAAK,MAAM,OAAQ;AAEvB,UAAM,YAAY,KAAK,cAAc,KAAK;AAC1C,UAAM,OAAO,KAAK;AAClB,UAAM,WAAW,KAAK,YAAY,KAAK,KAAK,YAAY;AAExD,QAAI,CAAC,QAAQ,CAAC,QAAS;AAEvB,QAAI;AAEF,UAAI,CAAC,KAAK,UAAU;AAClB,aAAK,WAAW;AAChB,aAAK,MAAM,IAAI,EAAE,MAAM,gBAAgB,gBAAgB,CAAC;AAAA,MAC1D;AAEA,YAAM,aAAyB;AAAA,QAC7B;AAAA,QACA,WAAW,KAAK,kBAAkB,KAAK;AAAA,QACvC,SAAS,KAAK,kBAAkB,KAAK,QAAQ,KAAK;AAAA,QAClD,YAAY,KAAK;AAAA,QACjB;AAAA,QACA,OAAO,KAAK,MAAM;AAAA,UAChB,CAAC,UAAuB;AAAA,YACtB,MAAM,KAAK;AAAA,YACX,WAAW,KAAK,QAAQ,KAAK;AAAA,YAC7B,SAAS,KAAK,MAAM,KAAK;AAAA,YACzB,iBAAiB,KAAK;AAAA,YACtB,YAAY,KAAK;AAAA,UACnB;AAAA,QACF;AAAA,MACF;
AAEA,UAAI,SAAS;AACX,YAAI,KAAK,iBAAiB,GAAG;AAC3B,eAAK,MAAM,IAAI;AAAA,YACb,MAAM,gBAAgB;AAAA,YACtB;AAAA,YACA,kBAAkB,EAAE,eAAe,KAAK,eAAe;AAAA,UACzD,CAAC;AACD,eAAK,iBAAiB;AAAA,QACxB;AAEA,aAAK,MAAM,IAAI;AAAA,UACb,MAAM,gBAAgB;AAAA,UACtB;AAAA,UACA,cAAc,CAAC,UAAU;AAAA,QAC3B,CAAC;AAED,YAAI,KAAK,UAAU;AACjB,eAAK,WAAW;AAChB,eAAK,MAAM,IAAI,EAAE,MAAM,gBAAgB,cAAc,CAAC;AAAA,QACxD;AAAA,MACF,OAAO;AACL,aAAK,MAAM,IAAI;AAAA,UACb,MAAM,gBAAgB;AAAA,UACtB;AAAA,UACA,cAAc,CAAC,UAAU;AAAA,QAC3B,CAAC;AAAA,MACH;AAAA,IACF,SAAS,GAAG;AACV,UAAI,aAAa,SAAS,EAAE,QAAQ,SAAS,iBAAiB,GAAG;AAE/D,aAAK,QAAQ;AAAA,UACX,EAAE,KAAK,EAAE;AAAA,UACT;AAAA,QACF;AAAA,MACF,OAAO;AACL,aAAK,QAAQ,MAAM,EAAE,KAAK,EAAE,GAAG,mCAAmC;AAAA,MACpE;AAAA,IACF;AAAA,EACF;AACF;","names":["ws"]}
+ {"version":3,"sources":["../../src/inference/stt.ts"],"sourcesContent":["// SPDX-FileCopyrightText: 2025 LiveKit, Inc.\n//\n// SPDX-License-Identifier: Apache-2.0\nimport { type AudioFrame } from '@livekit/rtc-node';\nimport type { WebSocket } from 'ws';\nimport { APIError, APIStatusError } from '../_exceptions.js';\nimport { AudioByteStream } from '../audio.js';\nimport { log } from '../log.js';\nimport { createStreamChannel } from '../stream/stream_channel.js';\nimport {\n STT as BaseSTT,\n SpeechStream as BaseSpeechStream,\n type SpeechData,\n type SpeechEvent,\n SpeechEventType,\n} from '../stt/index.js';\nimport { type APIConnectOptions, DEFAULT_API_CONNECT_OPTIONS } from '../types.js';\nimport { type AudioBuffer, Event, Task, cancelAndWait, shortuuid, waitForAbort } from '../utils.js';\nimport { type TimedString, createTimedString } from '../voice/io.js';\nimport {\n type SttServerEvent,\n type SttTranscriptEvent,\n sttServerEventSchema,\n} from './api_protos.js';\nimport { type AnyString, connectWs, createAccessToken } from './utils.js';\n\nexport type DeepgramModels =\n | 'deepgram/flux-general'\n | 'deepgram/nova-3'\n | 'deepgram/nova-3-medical'\n | 'deepgram/nova-2'\n | 'deepgram/nova-2-medical'\n | 'deepgram/nova-2-conversationalai'\n | 'deepgram/nova-2-phonecall';\n\nexport type CartesiaModels = 'cartesia/ink-whisper';\n\nexport type AssemblyaiModels =\n | 'assemblyai/universal-streaming'\n | 'assemblyai/universal-streaming-multilingual';\n\nexport type ElevenlabsSTTModels = 'elevenlabs/scribe_v2_realtime';\n\nexport interface CartesiaOptions {\n min_volume?: number; // default: not specified\n max_silence_duration_secs?: number; // default: not specified\n}\n\nexport interface DeepgramOptions {\n filler_words?: boolean; // default: true\n interim_results?: boolean; // default: true\n endpointing?: number; // default: 25 (ms)\n punctuate?: boolean; // default: false\n smart_format?: boolean;\n keywords?: Array<[string, number]>;\n keyterms?: string[];\n profanity_filter?: boolean;\n numerals?: boolean;\n mip_opt_out?: boolean;\n}\n\nexport interface AssemblyAIOptions {\n format_turns?: boolean; // default: false\n end_of_turn_confidence_threshold?: number; // default: 0.01\n min_end_of_turn_silence_when_confident?: number; // default: 0\n max_turn_silence?: number; // default: not specified\n keyterms_prompt?: string[]; // default: not specified\n}\n\nexport type STTLanguages =\n | 'multi'\n | 'en'\n | 'de'\n | 'es'\n | 'fr'\n | 'ja'\n | 'pt'\n | 'zh'\n | 'hi'\n | AnyString;\n\ntype _STTModels = DeepgramModels | CartesiaModels | AssemblyaiModels | ElevenlabsSTTModels;\n\nexport type STTModels = _STTModels | 'auto' | AnyString;\n\nexport type ModelWithLanguage = `${_STTModels}:${STTLanguages}` | STTModels;\n\nexport type STTOptions<TModel extends STTModels> = TModel extends DeepgramModels\n ? DeepgramOptions\n : TModel extends CartesiaModels\n ? CartesiaOptions\n : TModel extends AssemblyaiModels\n ? 
AssemblyAIOptions\n : Record<string, unknown>;\n\nexport type STTEncoding = 'pcm_s16le';\n\nconst DEFAULT_ENCODING: STTEncoding = 'pcm_s16le';\nconst DEFAULT_SAMPLE_RATE = 16000;\nconst DEFAULT_BASE_URL = 'wss://agent-gateway.livekit.cloud/v1';\nconst DEFAULT_CANCEL_TIMEOUT = 5000;\n\nexport interface InferenceSTTOptions<TModel extends STTModels> {\n model?: TModel;\n language?: STTLanguages;\n encoding: STTEncoding;\n sampleRate: number;\n baseURL: string;\n apiKey: string;\n apiSecret: string;\n modelOptions: STTOptions<TModel>;\n}\n\n/**\n * Livekit Cloud Inference STT\n */\nexport class STT<TModel extends STTModels> extends BaseSTT {\n private opts: InferenceSTTOptions<TModel>;\n private streams: Set<SpeechStream<TModel>> = new Set();\n\n #logger = log();\n\n constructor(opts?: {\n model?: TModel;\n language?: STTLanguages;\n baseURL?: string;\n encoding?: STTEncoding;\n sampleRate?: number;\n apiKey?: string;\n apiSecret?: string;\n modelOptions?: STTOptions<TModel>;\n }) {\n super({ streaming: true, interimResults: true, alignedTranscript: 'word' });\n\n const {\n model,\n language,\n baseURL,\n encoding = DEFAULT_ENCODING,\n sampleRate = DEFAULT_SAMPLE_RATE,\n apiKey,\n apiSecret,\n modelOptions = {} as STTOptions<TModel>,\n } = opts || {};\n\n const lkBaseURL = baseURL || process.env.LIVEKIT_INFERENCE_URL || DEFAULT_BASE_URL;\n const lkApiKey = apiKey || process.env.LIVEKIT_INFERENCE_API_KEY || process.env.LIVEKIT_API_KEY;\n if (!lkApiKey) {\n throw new Error('apiKey is required: pass apiKey or set LIVEKIT_API_KEY');\n }\n\n const lkApiSecret =\n apiSecret || process.env.LIVEKIT_INFERENCE_API_SECRET || process.env.LIVEKIT_API_SECRET;\n if (!lkApiSecret) {\n throw new Error('apiSecret is required: pass apiSecret or set LIVEKIT_API_SECRET');\n }\n\n this.opts = {\n model,\n language,\n encoding,\n sampleRate,\n baseURL: lkBaseURL,\n apiKey: lkApiKey,\n apiSecret: lkApiSecret,\n modelOptions,\n };\n }\n\n get label(): string {\n return 'inference.STT';\n }\n\n static fromModelString(modelString: string): STT<AnyString> {\n if (modelString.includes(':')) {\n const [model, language] = modelString.split(':') as [AnyString, STTLanguages];\n return new STT({ model, language });\n }\n return new STT({ model: modelString });\n }\n\n protected async _recognize(_: AudioBuffer): Promise<SpeechEvent> {\n throw new Error('LiveKit STT does not support batch recognition, use stream() instead');\n }\n\n updateOptions(opts: Partial<Pick<InferenceSTTOptions<TModel>, 'model' | 'language'>>): void {\n this.opts = { ...this.opts, ...opts };\n\n for (const stream of this.streams) {\n stream.updateOptions(opts);\n }\n }\n\n stream(options?: {\n language?: STTLanguages | string;\n connOptions?: APIConnectOptions;\n }): SpeechStream<TModel> {\n const { language, connOptions = DEFAULT_API_CONNECT_OPTIONS } = options || {};\n const streamOpts = {\n ...this.opts,\n language: language ?? 
this.opts.language,\n } as InferenceSTTOptions<TModel>;\n\n const stream = new SpeechStream(this, streamOpts, connOptions);\n this.streams.add(stream);\n\n return stream;\n }\n\n async connectWs(timeout: number): Promise<WebSocket> {\n const params = {\n settings: {\n sample_rate: String(this.opts.sampleRate),\n encoding: this.opts.encoding,\n extra: this.opts.modelOptions,\n },\n } as Record<string, unknown>;\n\n if (this.opts.model && this.opts.model !== 'auto') {\n params.model = this.opts.model;\n }\n\n if (this.opts.language) {\n (params.settings as Record<string, unknown>).language = this.opts.language;\n }\n\n let baseURL = this.opts.baseURL;\n if (baseURL.startsWith('http://') || baseURL.startsWith('https://')) {\n baseURL = baseURL.replace('http', 'ws');\n }\n\n const token = await createAccessToken(this.opts.apiKey, this.opts.apiSecret);\n const url = `${baseURL}/stt`;\n const headers = { Authorization: `Bearer ${token}` } as Record<string, string>;\n\n const socket = await connectWs(url, headers, timeout);\n const msg = { ...params, type: 'session.create' };\n socket.send(JSON.stringify(msg));\n\n return socket;\n }\n}\n\nexport class SpeechStream<TModel extends STTModels> extends BaseSpeechStream {\n private opts: InferenceSTTOptions<TModel>;\n private requestId = shortuuid('stt_request_');\n private speaking = false;\n private speechDuration = 0;\n private reconnectEvent = new Event();\n private stt: STT<TModel>;\n private connOptions: APIConnectOptions;\n\n #logger = log();\n\n constructor(\n sttImpl: STT<TModel>,\n opts: InferenceSTTOptions<TModel>,\n connOptions: APIConnectOptions,\n ) {\n super(sttImpl, opts.sampleRate, connOptions);\n this.opts = opts;\n this.stt = sttImpl;\n this.connOptions = connOptions;\n }\n\n get label(): string {\n return 'inference.SpeechStream';\n }\n\n updateOptions(opts: Partial<Pick<InferenceSTTOptions<TModel>, 'model' | 'language'>>): void {\n this.opts = { ...this.opts, ...opts };\n this.reconnectEvent.set();\n }\n\n protected async run(): Promise<void> {\n while (true) {\n // Create fresh resources for each connection attempt\n let ws: WebSocket | null = null;\n let closing = false;\n let finalReceived = false;\n\n const eventChannel = createStreamChannel<SttServerEvent>();\n\n const resourceCleanup = () => {\n if (closing) return;\n closing = true;\n eventChannel.close();\n ws?.removeAllListeners();\n ws?.close();\n };\n\n const createWsListener = async (ws: WebSocket, signal: AbortSignal) => {\n return new Promise<void>((resolve, reject) => {\n const onAbort = () => {\n resourceCleanup();\n reject(new Error('WebSocket connection aborted'));\n };\n\n signal.addEventListener('abort', onAbort, { once: true });\n\n ws.on('message', (data) => {\n const json = JSON.parse(data.toString()) as SttServerEvent;\n eventChannel.write(json);\n });\n\n ws.on('error', (e) => {\n this.#logger.error({ error: e }, 'WebSocket error');\n resourceCleanup();\n reject(e);\n });\n\n ws.on('close', (code: number) => {\n resourceCleanup();\n\n if (!closing) return this.#logger.error('WebSocket closed unexpectedly');\n if (finalReceived) return resolve();\n\n reject(\n new APIStatusError({\n message: 'LiveKit STT connection closed unexpectedly',\n options: { statusCode: code },\n }),\n );\n });\n });\n };\n\n const send = async (socket: WebSocket, signal: AbortSignal) => {\n const audioStream = new AudioByteStream(\n this.opts.sampleRate,\n 1,\n Math.floor(this.opts.sampleRate / 20), // 50ms\n );\n\n // Create abort promise once to avoid memory leak\n const 
abortPromise = new Promise<never>((_, reject) => {\n if (signal.aborted) {\n return reject(new Error('Send aborted'));\n }\n const onAbort = () => reject(new Error('Send aborted'));\n signal.addEventListener('abort', onAbort, { once: true });\n });\n\n // Manual iteration to support cancellation\n const iterator = this.input[Symbol.asyncIterator]();\n try {\n while (true) {\n const result = await Promise.race([iterator.next(), abortPromise]);\n\n if (result.done) break;\n const ev = result.value;\n\n let frames: AudioFrame[];\n if (ev === SpeechStream.FLUSH_SENTINEL) {\n frames = audioStream.flush();\n } else {\n const frame = ev as AudioFrame;\n frames = audioStream.write(new Int16Array(frame.data).buffer);\n }\n\n for (const frame of frames) {\n this.speechDuration += frame.samplesPerChannel / frame.sampleRate;\n const base64 = Buffer.from(frame.data.buffer).toString('base64');\n const msg = { type: 'input_audio', audio: base64 };\n socket.send(JSON.stringify(msg));\n }\n }\n\n closing = true;\n socket.send(JSON.stringify({ type: 'session.finalize' }));\n } catch (e) {\n if ((e as Error).message === 'Send aborted') {\n // Expected abort, don't log\n return;\n }\n throw e;\n }\n };\n\n const recv = async (signal: AbortSignal) => {\n const serverEventStream = eventChannel.stream();\n const reader = serverEventStream.getReader();\n\n try {\n while (!this.closed && !signal.aborted) {\n const result = await reader.read();\n if (signal.aborted) return;\n if (result.done) return;\n\n // Parse and validate with Zod schema\n const parseResult = await sttServerEventSchema.safeParseAsync(result.value);\n if (!parseResult.success) {\n this.#logger.warn(\n { error: parseResult.error, rawData: result.value },\n 'Failed to parse STT server event',\n );\n continue;\n }\n\n const event: SttServerEvent = parseResult.data;\n\n switch (event.type) {\n case 'session.created':\n case 'session.finalized':\n break;\n case 'session.closed':\n finalReceived = true;\n resourceCleanup();\n break;\n case 'interim_transcript':\n this.processTranscript(event, false);\n break;\n case 'final_transcript':\n this.processTranscript(event, true);\n break;\n case 'error':\n this.#logger.error({ error: event }, 'Received error from LiveKit STT');\n resourceCleanup();\n throw new APIError(`LiveKit STT returned error: ${JSON.stringify(event)}`);\n }\n }\n } finally {\n reader.releaseLock();\n try {\n await serverEventStream.cancel();\n } catch (e) {\n this.#logger.debug('Error cancelling serverEventStream (may already be cancelled):', e);\n }\n }\n };\n\n try {\n ws = await this.stt.connectWs(this.connOptions.timeoutMs);\n\n const controller = this.abortController; // Use base class abortController for proper cancellation\n const sendTask = Task.from(({ signal }) => send(ws!, signal), controller);\n const wsListenerTask = Task.from(({ signal }) => createWsListener(ws!, signal), controller);\n const recvTask = Task.from(({ signal }) => recv(signal), controller);\n const waitReconnectTask = Task.from(\n ({ signal }) => Promise.race([this.reconnectEvent.wait(), waitForAbort(signal)]),\n controller,\n );\n\n try {\n await Promise.race([\n Promise.all([sendTask.result, wsListenerTask.result, recvTask.result]),\n waitReconnectTask.result,\n ]);\n\n // If reconnect didn't trigger, tasks finished - exit loop\n if (!waitReconnectTask.done) break;\n\n // Reconnect triggered - clear event and continue loop\n this.reconnectEvent.clear();\n } finally {\n // Cancel all tasks to ensure cleanup\n await cancelAndWait(\n [sendTask, 
wsListenerTask, recvTask, waitReconnectTask],\n DEFAULT_CANCEL_TIMEOUT,\n );\n resourceCleanup();\n }\n } finally {\n // Ensure cleanup even if connectWs throws\n resourceCleanup();\n }\n }\n }\n\n private processTranscript(data: SttTranscriptEvent, isFinal: boolean) {\n // Check if queue is closed to avoid race condition during disconnect\n if (this.queue.closed) return;\n\n const requestId = data.session_id || this.requestId;\n const text = data.transcript;\n const language = data.language || this.opts.language || 'en';\n\n if (!text && !isFinal) return;\n\n try {\n // We'll have a more accurate way of detecting when speech started when we have VAD\n if (!this.speaking) {\n this.speaking = true;\n this.queue.put({ type: SpeechEventType.START_OF_SPEECH });\n }\n\n const speechData: SpeechData = {\n language,\n startTime: this.startTimeOffset + data.start,\n endTime: this.startTimeOffset + data.start + data.duration,\n confidence: data.confidence,\n text,\n words: data.words.map(\n (word): TimedString =>\n createTimedString({\n text: word.word,\n startTime: word.start + this.startTimeOffset,\n endTime: word.end + this.startTimeOffset,\n startTimeOffset: this.startTimeOffset,\n confidence: word.confidence,\n }),\n ),\n };\n\n if (isFinal) {\n if (this.speechDuration > 0) {\n this.queue.put({\n type: SpeechEventType.RECOGNITION_USAGE,\n requestId,\n recognitionUsage: { audioDuration: this.speechDuration },\n });\n this.speechDuration = 0;\n }\n\n this.queue.put({\n type: SpeechEventType.FINAL_TRANSCRIPT,\n requestId,\n alternatives: [speechData],\n });\n\n if (this.speaking) {\n this.speaking = false;\n this.queue.put({ type: SpeechEventType.END_OF_SPEECH });\n }\n } else {\n this.queue.put({\n type: SpeechEventType.INTERIM_TRANSCRIPT,\n requestId,\n alternatives: [speechData],\n });\n }\n } catch (e) {\n if (e instanceof Error && e.message.includes('Queue is closed')) {\n // Expected behavior on disconnect, log as warning\n this.#logger.warn(\n { err: e },\n 'Queue closed during transcript processing (expected during disconnect)',\n );\n } else {\n this.#logger.error({ err: e }, 'Error putting transcript to queue');\n }\n }\n 
}\n}\n"],"mappings":"AAGA,eAAgC;AAEhC,SAAS,UAAU,sBAAsB;AACzC,SAAS,uBAAuB;AAChC,SAAS,WAAW;AACpB,SAAS,2BAA2B;AACpC;AAAA,EACE,OAAO;AAAA,EACP,gBAAgB;AAAA,EAGhB;AAAA,OACK;AACP,SAAiC,mCAAmC;AACpE,SAA2B,OAAO,MAAM,eAAe,WAAW,oBAAoB;AACtF,SAA2B,yBAAyB;AACpD;AAAA,EAGE;AAAA,OACK;AACP,SAAyB,WAAW,yBAAyB;AAyE7D,MAAM,mBAAgC;AACtC,MAAM,sBAAsB;AAC5B,MAAM,mBAAmB;AACzB,MAAM,yBAAyB;AAgBxB,MAAM,YAAsC,QAAQ;AAAA,EACjD;AAAA,EACA,UAAqC,oBAAI,IAAI;AAAA,EAErD,UAAU,IAAI;AAAA,EAEd,YAAY,MAST;AACD,UAAM,EAAE,WAAW,MAAM,gBAAgB,MAAM,mBAAmB,OAAO,CAAC;AAE1E,UAAM;AAAA,MACJ;AAAA,MACA;AAAA,MACA;AAAA,MACA,WAAW;AAAA,MACX,aAAa;AAAA,MACb;AAAA,MACA;AAAA,MACA,eAAe,CAAC;AAAA,IAClB,IAAI,QAAQ,CAAC;AAEb,UAAM,YAAY,WAAW,QAAQ,IAAI,yBAAyB;AAClE,UAAM,WAAW,UAAU,QAAQ,IAAI,6BAA6B,QAAQ,IAAI;AAChF,QAAI,CAAC,UAAU;AACb,YAAM,IAAI,MAAM,wDAAwD;AAAA,IAC1E;AAEA,UAAM,cACJ,aAAa,QAAQ,IAAI,gCAAgC,QAAQ,IAAI;AACvE,QAAI,CAAC,aAAa;AAChB,YAAM,IAAI,MAAM,iEAAiE;AAAA,IACnF;AAEA,SAAK,OAAO;AAAA,MACV;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,SAAS;AAAA,MACT,QAAQ;AAAA,MACR,WAAW;AAAA,MACX;AAAA,IACF;AAAA,EACF;AAAA,EAEA,IAAI,QAAgB;AAClB,WAAO;AAAA,EACT;AAAA,EAEA,OAAO,gBAAgB,aAAqC;AAC1D,QAAI,YAAY,SAAS,GAAG,GAAG;AAC7B,YAAM,CAAC,OAAO,QAAQ,IAAI,YAAY,MAAM,GAAG;AAC/C,aAAO,IAAI,IAAI,EAAE,OAAO,SAAS,CAAC;AAAA,IACpC;AACA,WAAO,IAAI,IAAI,EAAE,OAAO,YAAY,CAAC;AAAA,EACvC;AAAA,EAEA,MAAgB,WAAW,GAAsC;AAC/D,UAAM,IAAI,MAAM,sEAAsE;AAAA,EACxF;AAAA,EAEA,cAAc,MAA8E;AAC1F,SAAK,OAAO,EAAE,GAAG,KAAK,MAAM,GAAG,KAAK;AAEpC,eAAW,UAAU,KAAK,SAAS;AACjC,aAAO,cAAc,IAAI;AAAA,IAC3B;AAAA,EACF;AAAA,EAEA,OAAO,SAGkB;AACvB,UAAM,EAAE,UAAU,cAAc,4BAA4B,IAAI,WAAW,CAAC;AAC5E,UAAM,aAAa;AAAA,MACjB,GAAG,KAAK;AAAA,MACR,UAAU,YAAY,KAAK,KAAK;AAAA,IAClC;AAEA,UAAM,SAAS,IAAI,aAAa,MAAM,YAAY,WAAW;AAC7D,SAAK,QAAQ,IAAI,MAAM;AAEvB,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,UAAU,SAAqC;AACnD,UAAM,SAAS;AAAA,MACb,UAAU;AAAA,QACR,aAAa,OAAO,KAAK,KAAK,UAAU;AAAA,QACxC,UAAU,KAAK,KAAK;AAAA,QACpB,OAAO,KAAK,KAAK;AAAA,MACnB;AAAA,IACF;AAEA,QAAI,KAAK,KAAK,SAAS,KAAK,KAAK,UAAU,QAAQ;AACjD,aAAO,QAAQ,KAAK,KAAK;AAAA,IAC3B;AAEA,QAAI,KAAK,KAAK,UAAU;AACtB,MAAC,OAAO,SAAqC,WAAW,KAAK,KAAK;AAAA,IACpE;AAEA,QAAI,UAAU,KAAK,KAAK;AACxB,QAAI,QAAQ,WAAW,SAAS,KAAK,QAAQ,WAAW,UAAU,GAAG;AACnE,gBAAU,QAAQ,QAAQ,QAAQ,IAAI;AAAA,IACxC;AAEA,UAAM,QAAQ,MAAM,kBAAkB,KAAK,KAAK,QAAQ,KAAK,KAAK,SAAS;AAC3E,UAAM,MAAM,GAAG,OAAO;AACtB,UAAM,UAAU,EAAE,eAAe,UAAU,KAAK,GAAG;AAEnD,UAAM,SAAS,MAAM,UAAU,KAAK,SAAS,OAAO;AACpD,UAAM,MAAM,EAAE,GAAG,QAAQ,MAAM,iBAAiB;AAChD,WAAO,KAAK,KAAK,UAAU,GAAG,CAAC;AAE/B,WAAO;AAAA,EACT;AACF;AAEO,MAAM,qBAA+C,iBAAiB;AAAA,EACnE;AAAA,EACA,YAAY,UAAU,cAAc;AAAA,EACpC,WAAW;AAAA,EACX,iBAAiB;AAAA,EACjB,iBAAiB,IAAI,MAAM;AAAA,EAC3B;AAAA,EACA;AAAA,EAER,UAAU,IAAI;AAAA,EAEd,YACE,SACA,MACA,aACA;AACA,UAAM,SAAS,KAAK,YAAY,WAAW;AAC3C,SAAK,OAAO;AACZ,SAAK,MAAM;AACX,SAAK,cAAc;AAAA,EACrB;AAAA,EAEA,IAAI,QAAgB;AAClB,WAAO;AAAA,EACT;AAAA,EAEA,cAAc,MAA8E;AAC1F,SAAK,OAAO,EAAE,GAAG,KAAK,MAAM,GAAG,KAAK;AACpC,SAAK,eAAe,IAAI;AAAA,EAC1B;AAAA,EAEA,MAAgB,MAAqB;AACnC,WAAO,MAAM;AAEX,UAAI,KAAuB;AAC3B,UAAI,UAAU;AACd,UAAI,gBAAgB;AAEpB,YAAM,eAAe,oBAAoC;AAEzD,YAAM,kBAAkB,MAAM;AAC5B,YAAI,QAAS;AACb,kBAAU;AACV,qBAAa,MAAM;AACnB,iCAAI;AACJ,iCAAI;AAAA,MACN;AAEA,YAAM,mBAAmB,OAAOA,KAAe,WAAwB;AACrE,eAAO,IAAI,QAAc,CAAC,SAAS,WAAW;AAC5C,gBAAM,UAAU,MAAM;AACpB,4BAAgB;AAChB,mBAAO,IAAI,MAAM,8BAA8B,CAAC;AAAA,UAClD;AAEA,iBAAO,iBAAiB,SAAS,SAAS,EAAE,MAAM,KAAK,CAAC;AAExD,UAAAA,IAAG,GAAG,WAAW,CAAC,SAAS;AACzB,kBAAM,OAAO,KAAK,MAAM,KAAK,SAAS,CAAC;AACvC,yBAAa,MAAM,IAAI;AAAA,UACzB,CAAC;AAED,UAAAA,IAAG,GAAG,SAAS,CAAC,MAAM;AACpB,iBAAK,QAAQ,MAAM,EAAE,OAAO,EAAE,GAAG,iBAAiB;AAClD,4BAAgB;AAChB,mBAAO,CAAC;AAAA,UACV,CAAC;AAED,UAAAA,IAAG,GAAG,SAAS,CAAC,SAAiB;AAC/B,4BAAgB
;AAEhB,gBAAI,CAAC,QAAS,QAAO,KAAK,QAAQ,MAAM,+BAA+B;AACvE,gBAAI,cAAe,QAAO,QAAQ;AAElC;AAAA,cACE,IAAI,eAAe;AAAA,gBACjB,SAAS;AAAA,gBACT,SAAS,EAAE,YAAY,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAAA,UACF,CAAC;AAAA,QACH,CAAC;AAAA,MACH;AAEA,YAAM,OAAO,OAAO,QAAmB,WAAwB;AAC7D,cAAM,cAAc,IAAI;AAAA,UACtB,KAAK,KAAK;AAAA,UACV;AAAA,UACA,KAAK,MAAM,KAAK,KAAK,aAAa,EAAE;AAAA;AAAA,QACtC;AAGA,cAAM,eAAe,IAAI,QAAe,CAAC,GAAG,WAAW;AACrD,cAAI,OAAO,SAAS;AAClB,mBAAO,OAAO,IAAI,MAAM,cAAc,CAAC;AAAA,UACzC;AACA,gBAAM,UAAU,MAAM,OAAO,IAAI,MAAM,cAAc,CAAC;AACtD,iBAAO,iBAAiB,SAAS,SAAS,EAAE,MAAM,KAAK,CAAC;AAAA,QAC1D,CAAC;AAGD,cAAM,WAAW,KAAK,MAAM,OAAO,aAAa,EAAE;AAClD,YAAI;AACF,iBAAO,MAAM;AACX,kBAAM,SAAS,MAAM,QAAQ,KAAK,CAAC,SAAS,KAAK,GAAG,YAAY,CAAC;AAEjE,gBAAI,OAAO,KAAM;AACjB,kBAAM,KAAK,OAAO;AAElB,gBAAI;AACJ,gBAAI,OAAO,aAAa,gBAAgB;AACtC,uBAAS,YAAY,MAAM;AAAA,YAC7B,OAAO;AACL,oBAAM,QAAQ;AACd,uBAAS,YAAY,MAAM,IAAI,WAAW,MAAM,IAAI,EAAE,MAAM;AAAA,YAC9D;AAEA,uBAAW,SAAS,QAAQ;AAC1B,mBAAK,kBAAkB,MAAM,oBAAoB,MAAM;AACvD,oBAAM,SAAS,OAAO,KAAK,MAAM,KAAK,MAAM,EAAE,SAAS,QAAQ;AAC/D,oBAAM,MAAM,EAAE,MAAM,eAAe,OAAO,OAAO;AACjD,qBAAO,KAAK,KAAK,UAAU,GAAG,CAAC;AAAA,YACjC;AAAA,UACF;AAEA,oBAAU;AACV,iBAAO,KAAK,KAAK,UAAU,EAAE,MAAM,mBAAmB,CAAC,CAAC;AAAA,QAC1D,SAAS,GAAG;AACV,cAAK,EAAY,YAAY,gBAAgB;AAE3C;AAAA,UACF;AACA,gBAAM;AAAA,QACR;AAAA,MACF;AAEA,YAAM,OAAO,OAAO,WAAwB;AAC1C,cAAM,oBAAoB,aAAa,OAAO;AAC9C,cAAM,SAAS,kBAAkB,UAAU;AAE3C,YAAI;AACF,iBAAO,CAAC,KAAK,UAAU,CAAC,OAAO,SAAS;AACtC,kBAAM,SAAS,MAAM,OAAO,KAAK;AACjC,gBAAI,OAAO,QAAS;AACpB,gBAAI,OAAO,KAAM;AAGjB,kBAAM,cAAc,MAAM,qBAAqB,eAAe,OAAO,KAAK;AAC1E,gBAAI,CAAC,YAAY,SAAS;AACxB,mBAAK,QAAQ;AAAA,gBACX,EAAE,OAAO,YAAY,OAAO,SAAS,OAAO,MAAM;AAAA,gBAClD;AAAA,cACF;AACA;AAAA,YACF;AAEA,kBAAM,QAAwB,YAAY;AAE1C,oBAAQ,MAAM,MAAM;AAAA,cAClB,KAAK;AAAA,cACL,KAAK;AACH;AAAA,cACF,KAAK;AACH,gCAAgB;AAChB,gCAAgB;AAChB;AAAA,cACF,KAAK;AACH,qBAAK,kBAAkB,OAAO,KAAK;AACnC;AAAA,cACF,KAAK;AACH,qBAAK,kBAAkB,OAAO,IAAI;AAClC;AAAA,cACF,KAAK;AACH,qBAAK,QAAQ,MAAM,EAAE,OAAO,MAAM,GAAG,iCAAiC;AACtE,gCAAgB;AAChB,sBAAM,IAAI,SAAS,+BAA+B,KAAK,UAAU,KAAK,CAAC,EAAE;AAAA,YAC7E;AAAA,UACF;AAAA,QACF,UAAE;AACA,iBAAO,YAAY;AACnB,cAAI;AACF,kBAAM,kBAAkB,OAAO;AAAA,UACjC,SAAS,GAAG;AACV,iBAAK,QAAQ,MAAM,kEAAkE,CAAC;AAAA,UACxF;AAAA,QACF;AAAA,MACF;AAEA,UAAI;AACF,aAAK,MAAM,KAAK,IAAI,UAAU,KAAK,YAAY,SAAS;AAExD,cAAM,aAAa,KAAK;AACxB,cAAM,WAAW,KAAK,KAAK,CAAC,EAAE,OAAO,MAAM,KAAK,IAAK,MAAM,GAAG,UAAU;AACxE,cAAM,iBAAiB,KAAK,KAAK,CAAC,EAAE,OAAO,MAAM,iBAAiB,IAAK,MAAM,GAAG,UAAU;AAC1F,cAAM,WAAW,KAAK,KAAK,CAAC,EAAE,OAAO,MAAM,KAAK,MAAM,GAAG,UAAU;AACnE,cAAM,oBAAoB,KAAK;AAAA,UAC7B,CAAC,EAAE,OAAO,MAAM,QAAQ,KAAK,CAAC,KAAK,eAAe,KAAK,GAAG,aAAa,MAAM,CAAC,CAAC;AAAA,UAC/E;AAAA,QACF;AAEA,YAAI;AACF,gBAAM,QAAQ,KAAK;AAAA,YACjB,QAAQ,IAAI,CAAC,SAAS,QAAQ,eAAe,QAAQ,SAAS,MAAM,CAAC;AAAA,YACrE,kBAAkB;AAAA,UACpB,CAAC;AAGD,cAAI,CAAC,kBAAkB,KAAM;AAG7B,eAAK,eAAe,MAAM;AAAA,QAC5B,UAAE;AAEA,gBAAM;AAAA,YACJ,CAAC,UAAU,gBAAgB,UAAU,iBAAiB;AAAA,YACtD;AAAA,UACF;AACA,0BAAgB;AAAA,QAClB;AAAA,MACF,UAAE;AAEA,wBAAgB;AAAA,MAClB;AAAA,IACF;AAAA,EACF;AAAA,EAEQ,kBAAkB,MAA0B,SAAkB;AAEpE,QAAI,KAAK,MAAM,OAAQ;AAEvB,UAAM,YAAY,KAAK,cAAc,KAAK;AAC1C,UAAM,OAAO,KAAK;AAClB,UAAM,WAAW,KAAK,YAAY,KAAK,KAAK,YAAY;AAExD,QAAI,CAAC,QAAQ,CAAC,QAAS;AAEvB,QAAI;AAEF,UAAI,CAAC,KAAK,UAAU;AAClB,aAAK,WAAW;AAChB,aAAK,MAAM,IAAI,EAAE,MAAM,gBAAgB,gBAAgB,CAAC;AAAA,MAC1D;AAEA,YAAM,aAAyB;AAAA,QAC7B;AAAA,QACA,WAAW,KAAK,kBAAkB,KAAK;AAAA,QACvC,SAAS,KAAK,kBAAkB,KAAK,QAAQ,KAAK;AAAA,QAClD,YAAY,KAAK;AAAA,QACjB;AAAA,QACA,OAAO,KAAK,MAAM;AAAA,UAChB,CAAC,SACC,kBAAkB;AAAA,YAChB,MAAM,KAAK;AAAA,YACX,WAAW,KAAK,QAAQ,KAAK;AAAA,YAC7B,SAAS,KAAK,MAAM,KAAK;AAAA,YACzB,iBAAiB,KAAK;AAAA,YACtB,YAAY,KAAK;AAAA,U
ACnB,CAAC;AAAA,QACL;AAAA,MACF;AAEA,UAAI,SAAS;AACX,YAAI,KAAK,iBAAiB,GAAG;AAC3B,eAAK,MAAM,IAAI;AAAA,YACb,MAAM,gBAAgB;AAAA,YACtB;AAAA,YACA,kBAAkB,EAAE,eAAe,KAAK,eAAe;AAAA,UACzD,CAAC;AACD,eAAK,iBAAiB;AAAA,QACxB;AAEA,aAAK,MAAM,IAAI;AAAA,UACb,MAAM,gBAAgB;AAAA,UACtB;AAAA,UACA,cAAc,CAAC,UAAU;AAAA,QAC3B,CAAC;AAED,YAAI,KAAK,UAAU;AACjB,eAAK,WAAW;AAChB,eAAK,MAAM,IAAI,EAAE,MAAM,gBAAgB,cAAc,CAAC;AAAA,QACxD;AAAA,MACF,OAAO;AACL,aAAK,MAAM,IAAI;AAAA,UACb,MAAM,gBAAgB;AAAA,UACtB;AAAA,UACA,cAAc,CAAC,UAAU;AAAA,QAC3B,CAAC;AAAA,MACH;AAAA,IACF,SAAS,GAAG;AACV,UAAI,aAAa,SAAS,EAAE,QAAQ,SAAS,iBAAiB,GAAG;AAE/D,aAAK,QAAQ;AAAA,UACX,EAAE,KAAK,EAAE;AAAA,UACT;AAAA,QACF;AAAA,MACF,OAAO;AACL,aAAK,QAAQ,MAAM,EAAE,KAAK,EAAE,GAAG,mCAAmC;AAAA,MACpE;AAAA,IACF;AAAA,EACF;AACF;","names":["ws"]}
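The change buried in the sourcemap hunk above (its sources entry points at ../../src/inference/stt.ts) is that processTranscript() no longer builds word-level TimedString values as plain object literals; it now routes them through a createTimedString() helper that the new source imports from ../voice/io.js. A minimal sketch of that before/after follows; the TimedString shape and the factory body are illustrative assumptions only, since the real definitions in src/voice/io.ts are not part of this hunk.

```ts
// Sketch reconstructed from the sourcesContent embedded in the hunk above.
// The interface and factory below are stand-ins for the real exports of
// src/voice/io.ts, which this hunk does not show.
interface TimedString {
  text: string;
  startTime: number;
  endTime: number;
  startTimeOffset: number;
  confidence?: number;
}

// Hypothetical stand-in for the createTimedString() the 1.0.42 source imports.
function createTimedString(init: TimedString): TimedString {
  return { ...init };
}

interface SttWord {
  word: string;
  start: number;
  end: number;
  confidence?: number;
}

// 1.0.40: word alignments were mapped into plain object literals.
function mapWordsOld(words: SttWord[], startTimeOffset: number): TimedString[] {
  return words.map((word): TimedString => ({
    text: word.word,
    startTime: word.start + startTimeOffset,
    endTime: word.end + startTimeOffset,
    startTimeOffset,
    confidence: word.confidence,
  }));
}

// 1.0.42: the same mapping now goes through the createTimedString() factory.
function mapWordsNew(words: SttWord[], startTimeOffset: number): TimedString[] {
  return words.map((word): TimedString =>
    createTimedString({
      text: word.word,
      startTime: word.start + startTimeOffset,
      endTime: word.end + startTimeOffset,
      startTimeOffset,
      confidence: word.confidence,
    }),
  );
}
```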
@@ -1 +1 @@
- {"version":3,"sources":["../../src/llm/realtime.ts"],"sourcesContent":["// SPDX-FileCopyrightText: 2025 LiveKit, Inc.\n//\n// SPDX-License-Identifier: Apache-2.0\nimport type { AudioFrame } from '@livekit/rtc-node';\nimport { EventEmitter } from 'events';\nimport type { ReadableStream } from 'node:stream/web';\nimport { DeferredReadableStream } from '../stream/deferred_stream.js';\nimport { Task } from '../utils.js';\nimport type { ChatContext, FunctionCall } from './chat_context.js';\nimport type { ToolChoice, ToolContext } from './tool_context.js';\n\nexport type InputSpeechStartedEvent = object;\n\nexport interface InputSpeechStoppedEvent {\n userTranscriptionEnabled: boolean;\n}\n\nexport interface MessageGeneration {\n messageId: string;\n textStream: ReadableStream<string>;\n audioStream: ReadableStream<AudioFrame>;\n modalities?: Promise<('text' | 'audio')[]>;\n}\n\nexport interface GenerationCreatedEvent {\n messageStream: ReadableStream<MessageGeneration>;\n functionStream: ReadableStream<FunctionCall>;\n userInitiated: boolean;\n /** Response ID for correlating metrics with spans */\n responseId?: string;\n}\n\nexport interface RealtimeModelError {\n type: 'realtime_model_error';\n timestamp: number;\n label: string;\n error: Error;\n recoverable: boolean;\n}\n\nexport interface RealtimeCapabilities {\n messageTruncation: boolean;\n turnDetection: boolean;\n userTranscription: boolean;\n autoToolReplyGeneration: boolean;\n audioOutput: boolean;\n}\n\nexport interface InputTranscriptionCompleted {\n itemId: string;\n transcript: string;\n isFinal: boolean;\n}\n\nexport interface RealtimeSessionReconnectedEvent {}\n\nexport abstract class RealtimeModel {\n private _capabilities: RealtimeCapabilities;\n\n constructor(capabilities: RealtimeCapabilities) {\n this._capabilities = capabilities;\n }\n\n get capabilities() {\n return this._capabilities;\n }\n\n /** The model name/identifier used by this realtime model */\n abstract get model(): string;\n\n abstract session(): RealtimeSession;\n\n abstract close(): Promise<void>;\n}\n\nexport abstract class RealtimeSession extends EventEmitter {\n protected _realtimeModel: RealtimeModel;\n private deferredInputStream = new DeferredReadableStream<AudioFrame>();\n private _mainTask: Task<void>;\n\n constructor(realtimeModel: RealtimeModel) {\n super();\n this._realtimeModel = realtimeModel;\n this._mainTask = Task.from((controller) => this._mainTaskImpl(controller.signal));\n }\n\n get realtimeModel() {\n return this._realtimeModel;\n }\n\n abstract get chatCtx(): ChatContext;\n\n abstract get tools(): ToolContext;\n\n abstract updateInstructions(instructions: string): Promise<void>;\n\n /**\n * @throws RealtimeError on Timeout\n */\n abstract updateChatCtx(chatCtx: ChatContext): Promise<void>;\n\n abstract updateTools(tools: ToolContext): Promise<void>;\n\n abstract updateOptions(options: { toolChoice?: ToolChoice | null }): void;\n\n abstract pushAudio(frame: AudioFrame): void;\n\n /**\n * @throws RealtimeError on Timeout\n */\n abstract generateReply(instructions?: string): Promise<GenerationCreatedEvent>;\n\n /**\n * Commit the input audio buffer to the server\n */\n abstract commitAudio(): Promise<void>;\n\n /**\n * Clear the input audio buffer to the server\n */\n abstract clearAudio(): Promise<void>;\n\n /**\n * Cancel the current generation (do nothing if no generation is in progress)\n */\n abstract interrupt(): Promise<void>;\n\n /**\n * Truncate the message at the given audio end time\n */\n abstract truncate(options: {\n 
messageId: string;\n audioEndMs: number;\n modalities?: ('text' | 'audio')[];\n audioTranscript?: string;\n }): Promise<void>;\n\n async close(): Promise<void> {\n this._mainTask.cancel();\n }\n\n /**\n * Notifies the model that user activity has started\n */\n startUserActivity(): void {\n return;\n }\n\n private async _mainTaskImpl(signal: AbortSignal): Promise<void> {\n const reader = this.deferredInputStream.stream.getReader();\n while (true) {\n const { done, value } = await reader.read();\n if (done || signal.aborted) {\n break;\n }\n this.pushAudio(value);\n }\n }\n\n setInputAudioStream(audioStream: ReadableStream<AudioFrame>): void {\n this.deferredInputStream.setSource(audioStream);\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAIA,oBAA6B;AAE7B,6BAAuC;AACvC,mBAAqB;AAiDd,MAAe,cAAc;AAAA,EAC1B;AAAA,EAER,YAAY,cAAoC;AAC9C,SAAK,gBAAgB;AAAA,EACvB;AAAA,EAEA,IAAI,eAAe;AACjB,WAAO,KAAK;AAAA,EACd;AAQF;AAEO,MAAe,wBAAwB,2BAAa;AAAA,EAC/C;AAAA,EACF,sBAAsB,IAAI,8CAAmC;AAAA,EAC7D;AAAA,EAER,YAAY,eAA8B;AACxC,UAAM;AACN,SAAK,iBAAiB;AACtB,SAAK,YAAY,kBAAK,KAAK,CAAC,eAAe,KAAK,cAAc,WAAW,MAAM,CAAC;AAAA,EAClF;AAAA,EAEA,IAAI,gBAAgB;AAClB,WAAO,KAAK;AAAA,EACd;AAAA,EAiDA,MAAM,QAAuB;AAC3B,SAAK,UAAU,OAAO;AAAA,EACxB;AAAA;AAAA;AAAA;AAAA,EAKA,oBAA0B;AACxB;AAAA,EACF;AAAA,EAEA,MAAc,cAAc,QAAoC;AAC9D,UAAM,SAAS,KAAK,oBAAoB,OAAO,UAAU;AACzD,WAAO,MAAM;AACX,YAAM,EAAE,MAAM,MAAM,IAAI,MAAM,OAAO,KAAK;AAC1C,UAAI,QAAQ,OAAO,SAAS;AAC1B;AAAA,MACF;AACA,WAAK,UAAU,KAAK;AAAA,IACtB;AAAA,EACF;AAAA,EAEA,oBAAoB,aAA+C;AACjE,SAAK,oBAAoB,UAAU,WAAW;AAAA,EAChD;AACF;","names":[]}
+ {"version":3,"sources":["../../src/llm/realtime.ts"],"sourcesContent":["// SPDX-FileCopyrightText: 2025 LiveKit, Inc.\n//\n// SPDX-License-Identifier: Apache-2.0\nimport type { AudioFrame } from '@livekit/rtc-node';\nimport { EventEmitter } from 'events';\nimport type { ReadableStream } from 'node:stream/web';\nimport { DeferredReadableStream } from '../stream/deferred_stream.js';\nimport { Task } from '../utils.js';\nimport type { TimedString } from '../voice/io.js';\nimport type { ChatContext, FunctionCall } from './chat_context.js';\nimport type { ToolChoice, ToolContext } from './tool_context.js';\n\nexport type InputSpeechStartedEvent = object;\n\nexport interface InputSpeechStoppedEvent {\n userTranscriptionEnabled: boolean;\n}\n\nexport interface MessageGeneration {\n messageId: string;\n /**\n * Text stream that may contain plain strings or TimedString objects with timestamps.\n */\n textStream: ReadableStream<string | TimedString>;\n audioStream: ReadableStream<AudioFrame>;\n modalities?: Promise<('text' | 'audio')[]>;\n}\n\nexport interface GenerationCreatedEvent {\n messageStream: ReadableStream<MessageGeneration>;\n functionStream: ReadableStream<FunctionCall>;\n userInitiated: boolean;\n /** Response ID for correlating metrics with spans */\n responseId?: string;\n}\n\nexport interface RealtimeModelError {\n type: 'realtime_model_error';\n timestamp: number;\n label: string;\n error: Error;\n recoverable: boolean;\n}\n\nexport interface RealtimeCapabilities {\n messageTruncation: boolean;\n turnDetection: boolean;\n userTranscription: boolean;\n autoToolReplyGeneration: boolean;\n audioOutput: boolean;\n}\n\nexport interface InputTranscriptionCompleted {\n itemId: string;\n transcript: string;\n isFinal: boolean;\n}\n\nexport interface RealtimeSessionReconnectedEvent {}\n\nexport abstract class RealtimeModel {\n private _capabilities: RealtimeCapabilities;\n\n constructor(capabilities: RealtimeCapabilities) {\n this._capabilities = capabilities;\n }\n\n get capabilities() {\n return this._capabilities;\n }\n\n /** The model name/identifier used by this realtime model */\n abstract get model(): string;\n\n abstract session(): RealtimeSession;\n\n abstract close(): Promise<void>;\n}\n\nexport abstract class RealtimeSession extends EventEmitter {\n protected _realtimeModel: RealtimeModel;\n private deferredInputStream = new DeferredReadableStream<AudioFrame>();\n private _mainTask: Task<void>;\n\n constructor(realtimeModel: RealtimeModel) {\n super();\n this._realtimeModel = realtimeModel;\n this._mainTask = Task.from((controller) => this._mainTaskImpl(controller.signal));\n }\n\n get realtimeModel() {\n return this._realtimeModel;\n }\n\n abstract get chatCtx(): ChatContext;\n\n abstract get tools(): ToolContext;\n\n abstract updateInstructions(instructions: string): Promise<void>;\n\n /**\n * @throws RealtimeError on Timeout\n */\n abstract updateChatCtx(chatCtx: ChatContext): Promise<void>;\n\n abstract updateTools(tools: ToolContext): Promise<void>;\n\n abstract updateOptions(options: { toolChoice?: ToolChoice | null }): void;\n\n abstract pushAudio(frame: AudioFrame): void;\n\n /**\n * @throws RealtimeError on Timeout\n */\n abstract generateReply(instructions?: string): Promise<GenerationCreatedEvent>;\n\n /**\n * Commit the input audio buffer to the server\n */\n abstract commitAudio(): Promise<void>;\n\n /**\n * Clear the input audio buffer to the server\n */\n abstract clearAudio(): Promise<void>;\n\n /**\n * Cancel the current generation (do nothing if no generation 
is in progress)\n */\n abstract interrupt(): Promise<void>;\n\n /**\n * Truncate the message at the given audio end time\n */\n abstract truncate(options: {\n messageId: string;\n audioEndMs: number;\n modalities?: ('text' | 'audio')[];\n audioTranscript?: string;\n }): Promise<void>;\n\n async close(): Promise<void> {\n this._mainTask.cancel();\n }\n\n /**\n * Notifies the model that user activity has started\n */\n startUserActivity(): void {\n return;\n }\n\n private async _mainTaskImpl(signal: AbortSignal): Promise<void> {\n const reader = this.deferredInputStream.stream.getReader();\n while (true) {\n const { done, value } = await reader.read();\n if (done || signal.aborted) {\n break;\n }\n this.pushAudio(value);\n }\n }\n\n setInputAudioStream(audioStream: ReadableStream<AudioFrame>): void {\n this.deferredInputStream.setSource(audioStream);\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAIA,oBAA6B;AAE7B,6BAAuC;AACvC,mBAAqB;AAqDd,MAAe,cAAc;AAAA,EAC1B;AAAA,EAER,YAAY,cAAoC;AAC9C,SAAK,gBAAgB;AAAA,EACvB;AAAA,EAEA,IAAI,eAAe;AACjB,WAAO,KAAK;AAAA,EACd;AAQF;AAEO,MAAe,wBAAwB,2BAAa;AAAA,EAC/C;AAAA,EACF,sBAAsB,IAAI,8CAAmC;AAAA,EAC7D;AAAA,EAER,YAAY,eAA8B;AACxC,UAAM;AACN,SAAK,iBAAiB;AACtB,SAAK,YAAY,kBAAK,KAAK,CAAC,eAAe,KAAK,cAAc,WAAW,MAAM,CAAC;AAAA,EAClF;AAAA,EAEA,IAAI,gBAAgB;AAClB,WAAO,KAAK;AAAA,EACd;AAAA,EAiDA,MAAM,QAAuB;AAC3B,SAAK,UAAU,OAAO;AAAA,EACxB;AAAA;AAAA;AAAA;AAAA,EAKA,oBAA0B;AACxB;AAAA,EACF;AAAA,EAEA,MAAc,cAAc,QAAoC;AAC9D,UAAM,SAAS,KAAK,oBAAoB,OAAO,UAAU;AACzD,WAAO,MAAM;AACX,YAAM,EAAE,MAAM,MAAM,IAAI,MAAM,OAAO,KAAK;AAC1C,UAAI,QAAQ,OAAO,SAAS;AAC1B;AAAA,MACF;AACA,WAAK,UAAU,KAAK;AAAA,IACtB;AAAA,EACF;AAAA,EAEA,oBAAoB,aAA+C;AACjE,SAAK,oBAAoB,UAAU,WAAW;AAAA,EAChD;AACF;","names":[]}
@@ -3,6 +3,7 @@
  import type { AudioFrame } from '@livekit/rtc-node';
  import { EventEmitter } from 'events';
  import type { ReadableStream } from 'node:stream/web';
+ import type { TimedString } from '../voice/io.js';
  import type { ChatContext, FunctionCall } from './chat_context.js';
  import type { ToolChoice, ToolContext } from './tool_context.js';
  export type InputSpeechStartedEvent = object;
@@ -11,7 +12,10 @@ export interface InputSpeechStoppedEvent {
  }
  export interface MessageGeneration {
  messageId: string;
- textStream: ReadableStream<string>;
+ /**
+ * Text stream that may contain plain strings or TimedString objects with timestamps.
+ */
+ textStream: ReadableStream<string | TimedString>;
  audioStream: ReadableStream<AudioFrame>;
  modalities?: Promise<('text' | 'audio')[]>;
  }
@@ -3,6 +3,7 @@
  import type { AudioFrame } from '@livekit/rtc-node';
  import { EventEmitter } from 'events';
  import type { ReadableStream } from 'node:stream/web';
+ import type { TimedString } from '../voice/io.js';
  import type { ChatContext, FunctionCall } from './chat_context.js';
  import type { ToolChoice, ToolContext } from './tool_context.js';
  export type InputSpeechStartedEvent = object;
@@ -11,7 +12,10 @@ export interface InputSpeechStoppedEvent {
  }
  export interface MessageGeneration {
  messageId: string;
- textStream: ReadableStream<string>;
+ /**
+ * Text stream that may contain plain strings or TimedString objects with timestamps.
+ */
+ textStream: ReadableStream<string | TimedString>;
  audioStream: ReadableStream<AudioFrame>;
  modalities?: Promise<('text' | 'audio')[]>;
  }
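Note on the two declaration hunks above: `MessageGeneration.textStream` widens from `ReadableStream<string>` to `ReadableStream<string | TimedString>`, so consumers should be prepared to receive either plain chunks or timestamped ones. A minimal TypeScript sketch of one way to handle that union is below; the `TimedStringLike` shape is an assumption inferred from the `createTimedString({ text, startTime })` call later in this diff, not the package's actual `TimedString` definition.

```ts
import type { ReadableStream } from 'node:stream/web';

// Assumed minimal shape; the real TimedString exported from voice/io may differ.
interface TimedStringLike {
  text: string;
  startTime: number; // seconds from the start of the synthesized audio
}

// Drain a textStream that may yield plain strings or timed chunks.
async function drainTextStream(
  textStream: ReadableStream<string | TimedStringLike>,
): Promise<void> {
  const reader = textStream.getReader();
  try {
    while (true) {
      const { done, value } = await reader.read();
      if (done) break;
      if (typeof value === 'string') {
        console.log('plain chunk:', value);
      } else {
        console.log(`chunk @ ${value.startTime.toFixed(2)}s:`, value.text);
      }
    }
  } finally {
    reader.releaseLock();
  }
}
```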
@@ -1 +1 @@
- {"version":3,"file":"realtime.d.ts","sourceRoot":"","sources":["../../src/llm/realtime.ts"],"names":[],"mappings":";;AAGA,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,mBAAmB,CAAC;AACpD,OAAO,EAAE,YAAY,EAAE,MAAM,QAAQ,CAAC;AACtC,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,iBAAiB,CAAC;AAGtD,OAAO,KAAK,EAAE,WAAW,EAAE,YAAY,EAAE,MAAM,mBAAmB,CAAC;AACnE,OAAO,KAAK,EAAE,UAAU,EAAE,WAAW,EAAE,MAAM,mBAAmB,CAAC;AAEjE,MAAM,MAAM,uBAAuB,GAAG,MAAM,CAAC;AAE7C,MAAM,WAAW,uBAAuB;IACtC,wBAAwB,EAAE,OAAO,CAAC;CACnC;AAED,MAAM,WAAW,iBAAiB;IAChC,SAAS,EAAE,MAAM,CAAC;IAClB,UAAU,EAAE,cAAc,CAAC,MAAM,CAAC,CAAC;IACnC,WAAW,EAAE,cAAc,CAAC,UAAU,CAAC,CAAC;IACxC,UAAU,CAAC,EAAE,OAAO,CAAC,CAAC,MAAM,GAAG,OAAO,CAAC,EAAE,CAAC,CAAC;CAC5C;AAED,MAAM,WAAW,sBAAsB;IACrC,aAAa,EAAE,cAAc,CAAC,iBAAiB,CAAC,CAAC;IACjD,cAAc,EAAE,cAAc,CAAC,YAAY,CAAC,CAAC;IAC7C,aAAa,EAAE,OAAO,CAAC;IACvB,qDAAqD;IACrD,UAAU,CAAC,EAAE,MAAM,CAAC;CACrB;AAED,MAAM,WAAW,kBAAkB;IACjC,IAAI,EAAE,sBAAsB,CAAC;IAC7B,SAAS,EAAE,MAAM,CAAC;IAClB,KAAK,EAAE,MAAM,CAAC;IACd,KAAK,EAAE,KAAK,CAAC;IACb,WAAW,EAAE,OAAO,CAAC;CACtB;AAED,MAAM,WAAW,oBAAoB;IACnC,iBAAiB,EAAE,OAAO,CAAC;IAC3B,aAAa,EAAE,OAAO,CAAC;IACvB,iBAAiB,EAAE,OAAO,CAAC;IAC3B,uBAAuB,EAAE,OAAO,CAAC;IACjC,WAAW,EAAE,OAAO,CAAC;CACtB;AAED,MAAM,WAAW,2BAA2B;IAC1C,MAAM,EAAE,MAAM,CAAC;IACf,UAAU,EAAE,MAAM,CAAC;IACnB,OAAO,EAAE,OAAO,CAAC;CAClB;AAED,MAAM,WAAW,+BAA+B;CAAG;AAEnD,8BAAsB,aAAa;IACjC,OAAO,CAAC,aAAa,CAAuB;gBAEhC,YAAY,EAAE,oBAAoB;IAI9C,IAAI,YAAY,yBAEf;IAED,4DAA4D;IAC5D,QAAQ,KAAK,KAAK,IAAI,MAAM,CAAC;IAE7B,QAAQ,CAAC,OAAO,IAAI,eAAe;IAEnC,QAAQ,CAAC,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;CAChC;AAED,8BAAsB,eAAgB,SAAQ,YAAY;IACxD,SAAS,CAAC,cAAc,EAAE,aAAa,CAAC;IACxC,OAAO,CAAC,mBAAmB,CAA4C;IACvE,OAAO,CAAC,SAAS,CAAa;gBAElB,aAAa,EAAE,aAAa;IAMxC,IAAI,aAAa,kBAEhB;IAED,QAAQ,KAAK,OAAO,IAAI,WAAW,CAAC;IAEpC,QAAQ,KAAK,KAAK,IAAI,WAAW,CAAC;IAElC,QAAQ,CAAC,kBAAkB,CAAC,YAAY,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IAEhE;;OAEG;IACH,QAAQ,CAAC,aAAa,CAAC,OAAO,EAAE,WAAW,GAAG,OAAO,CAAC,IAAI,CAAC;IAE3D,QAAQ,CAAC,WAAW,CAAC,KAAK,EAAE,WAAW,GAAG,OAAO,CAAC,IAAI,CAAC;IAEvD,QAAQ,CAAC,aAAa,CAAC,OAAO,EAAE;QAAE,UAAU,CAAC,EAAE,UAAU,GAAG,IAAI,CAAA;KAAE,GAAG,IAAI;IAEzE,QAAQ,CAAC,SAAS,CAAC,KAAK,EAAE,UAAU,GAAG,IAAI;IAE3C;;OAEG;IACH,QAAQ,CAAC,aAAa,CAAC,YAAY,CAAC,EAAE,MAAM,GAAG,OAAO,CAAC,sBAAsB,CAAC;IAE9E;;OAEG;IACH,QAAQ,CAAC,WAAW,IAAI,OAAO,CAAC,IAAI,CAAC;IAErC;;OAEG;IACH,QAAQ,CAAC,UAAU,IAAI,OAAO,CAAC,IAAI,CAAC;IAEpC;;OAEG;IACH,QAAQ,CAAC,SAAS,IAAI,OAAO,CAAC,IAAI,CAAC;IAEnC;;OAEG;IACH,QAAQ,CAAC,QAAQ,CAAC,OAAO,EAAE;QACzB,SAAS,EAAE,MAAM,CAAC;QAClB,UAAU,EAAE,MAAM,CAAC;QACnB,UAAU,CAAC,EAAE,CAAC,MAAM,GAAG,OAAO,CAAC,EAAE,CAAC;QAClC,eAAe,CAAC,EAAE,MAAM,CAAC;KAC1B,GAAG,OAAO,CAAC,IAAI,CAAC;IAEX,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;IAI5B;;OAEG;IACH,iBAAiB,IAAI,IAAI;YAIX,aAAa;IAW3B,mBAAmB,CAAC,WAAW,EAAE,cAAc,CAAC,UAAU,CAAC,GAAG,IAAI;CAGnE"}
+ {"version":3,"file":"realtime.d.ts","sourceRoot":"","sources":["../../src/llm/realtime.ts"],"names":[],"mappings":";;AAGA,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,mBAAmB,CAAC;AACpD,OAAO,EAAE,YAAY,EAAE,MAAM,QAAQ,CAAC;AACtC,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,iBAAiB,CAAC;AAGtD,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAClD,OAAO,KAAK,EAAE,WAAW,EAAE,YAAY,EAAE,MAAM,mBAAmB,CAAC;AACnE,OAAO,KAAK,EAAE,UAAU,EAAE,WAAW,EAAE,MAAM,mBAAmB,CAAC;AAEjE,MAAM,MAAM,uBAAuB,GAAG,MAAM,CAAC;AAE7C,MAAM,WAAW,uBAAuB;IACtC,wBAAwB,EAAE,OAAO,CAAC;CACnC;AAED,MAAM,WAAW,iBAAiB;IAChC,SAAS,EAAE,MAAM,CAAC;IAClB;;OAEG;IACH,UAAU,EAAE,cAAc,CAAC,MAAM,GAAG,WAAW,CAAC,CAAC;IACjD,WAAW,EAAE,cAAc,CAAC,UAAU,CAAC,CAAC;IACxC,UAAU,CAAC,EAAE,OAAO,CAAC,CAAC,MAAM,GAAG,OAAO,CAAC,EAAE,CAAC,CAAC;CAC5C;AAED,MAAM,WAAW,sBAAsB;IACrC,aAAa,EAAE,cAAc,CAAC,iBAAiB,CAAC,CAAC;IACjD,cAAc,EAAE,cAAc,CAAC,YAAY,CAAC,CAAC;IAC7C,aAAa,EAAE,OAAO,CAAC;IACvB,qDAAqD;IACrD,UAAU,CAAC,EAAE,MAAM,CAAC;CACrB;AAED,MAAM,WAAW,kBAAkB;IACjC,IAAI,EAAE,sBAAsB,CAAC;IAC7B,SAAS,EAAE,MAAM,CAAC;IAClB,KAAK,EAAE,MAAM,CAAC;IACd,KAAK,EAAE,KAAK,CAAC;IACb,WAAW,EAAE,OAAO,CAAC;CACtB;AAED,MAAM,WAAW,oBAAoB;IACnC,iBAAiB,EAAE,OAAO,CAAC;IAC3B,aAAa,EAAE,OAAO,CAAC;IACvB,iBAAiB,EAAE,OAAO,CAAC;IAC3B,uBAAuB,EAAE,OAAO,CAAC;IACjC,WAAW,EAAE,OAAO,CAAC;CACtB;AAED,MAAM,WAAW,2BAA2B;IAC1C,MAAM,EAAE,MAAM,CAAC;IACf,UAAU,EAAE,MAAM,CAAC;IACnB,OAAO,EAAE,OAAO,CAAC;CAClB;AAED,MAAM,WAAW,+BAA+B;CAAG;AAEnD,8BAAsB,aAAa;IACjC,OAAO,CAAC,aAAa,CAAuB;gBAEhC,YAAY,EAAE,oBAAoB;IAI9C,IAAI,YAAY,yBAEf;IAED,4DAA4D;IAC5D,QAAQ,KAAK,KAAK,IAAI,MAAM,CAAC;IAE7B,QAAQ,CAAC,OAAO,IAAI,eAAe;IAEnC,QAAQ,CAAC,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;CAChC;AAED,8BAAsB,eAAgB,SAAQ,YAAY;IACxD,SAAS,CAAC,cAAc,EAAE,aAAa,CAAC;IACxC,OAAO,CAAC,mBAAmB,CAA4C;IACvE,OAAO,CAAC,SAAS,CAAa;gBAElB,aAAa,EAAE,aAAa;IAMxC,IAAI,aAAa,kBAEhB;IAED,QAAQ,KAAK,OAAO,IAAI,WAAW,CAAC;IAEpC,QAAQ,KAAK,KAAK,IAAI,WAAW,CAAC;IAElC,QAAQ,CAAC,kBAAkB,CAAC,YAAY,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IAEhE;;OAEG;IACH,QAAQ,CAAC,aAAa,CAAC,OAAO,EAAE,WAAW,GAAG,OAAO,CAAC,IAAI,CAAC;IAE3D,QAAQ,CAAC,WAAW,CAAC,KAAK,EAAE,WAAW,GAAG,OAAO,CAAC,IAAI,CAAC;IAEvD,QAAQ,CAAC,aAAa,CAAC,OAAO,EAAE;QAAE,UAAU,CAAC,EAAE,UAAU,GAAG,IAAI,CAAA;KAAE,GAAG,IAAI;IAEzE,QAAQ,CAAC,SAAS,CAAC,KAAK,EAAE,UAAU,GAAG,IAAI;IAE3C;;OAEG;IACH,QAAQ,CAAC,aAAa,CAAC,YAAY,CAAC,EAAE,MAAM,GAAG,OAAO,CAAC,sBAAsB,CAAC;IAE9E;;OAEG;IACH,QAAQ,CAAC,WAAW,IAAI,OAAO,CAAC,IAAI,CAAC;IAErC;;OAEG;IACH,QAAQ,CAAC,UAAU,IAAI,OAAO,CAAC,IAAI,CAAC;IAEpC;;OAEG;IACH,QAAQ,CAAC,SAAS,IAAI,OAAO,CAAC,IAAI,CAAC;IAEnC;;OAEG;IACH,QAAQ,CAAC,QAAQ,CAAC,OAAO,EAAE;QACzB,SAAS,EAAE,MAAM,CAAC;QAClB,UAAU,EAAE,MAAM,CAAC;QACnB,UAAU,CAAC,EAAE,CAAC,MAAM,GAAG,OAAO,CAAC,EAAE,CAAC;QAClC,eAAe,CAAC,EAAE,MAAM,CAAC;KAC1B,GAAG,OAAO,CAAC,IAAI,CAAC;IAEX,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;IAI5B;;OAEG;IACH,iBAAiB,IAAI,IAAI;YAIX,aAAa;IAW3B,mBAAmB,CAAC,WAAW,EAAE,cAAc,CAAC,UAAU,CAAC,GAAG,IAAI;CAGnE"}
@@ -1 +1 @@
- {"version":3,"sources":["../../src/llm/realtime.ts"],"sourcesContent":["// SPDX-FileCopyrightText: 2025 LiveKit, Inc.\n//\n// SPDX-License-Identifier: Apache-2.0\nimport type { AudioFrame } from '@livekit/rtc-node';\nimport { EventEmitter } from 'events';\nimport type { ReadableStream } from 'node:stream/web';\nimport { DeferredReadableStream } from '../stream/deferred_stream.js';\nimport { Task } from '../utils.js';\nimport type { ChatContext, FunctionCall } from './chat_context.js';\nimport type { ToolChoice, ToolContext } from './tool_context.js';\n\nexport type InputSpeechStartedEvent = object;\n\nexport interface InputSpeechStoppedEvent {\n userTranscriptionEnabled: boolean;\n}\n\nexport interface MessageGeneration {\n messageId: string;\n textStream: ReadableStream<string>;\n audioStream: ReadableStream<AudioFrame>;\n modalities?: Promise<('text' | 'audio')[]>;\n}\n\nexport interface GenerationCreatedEvent {\n messageStream: ReadableStream<MessageGeneration>;\n functionStream: ReadableStream<FunctionCall>;\n userInitiated: boolean;\n /** Response ID for correlating metrics with spans */\n responseId?: string;\n}\n\nexport interface RealtimeModelError {\n type: 'realtime_model_error';\n timestamp: number;\n label: string;\n error: Error;\n recoverable: boolean;\n}\n\nexport interface RealtimeCapabilities {\n messageTruncation: boolean;\n turnDetection: boolean;\n userTranscription: boolean;\n autoToolReplyGeneration: boolean;\n audioOutput: boolean;\n}\n\nexport interface InputTranscriptionCompleted {\n itemId: string;\n transcript: string;\n isFinal: boolean;\n}\n\nexport interface RealtimeSessionReconnectedEvent {}\n\nexport abstract class RealtimeModel {\n private _capabilities: RealtimeCapabilities;\n\n constructor(capabilities: RealtimeCapabilities) {\n this._capabilities = capabilities;\n }\n\n get capabilities() {\n return this._capabilities;\n }\n\n /** The model name/identifier used by this realtime model */\n abstract get model(): string;\n\n abstract session(): RealtimeSession;\n\n abstract close(): Promise<void>;\n}\n\nexport abstract class RealtimeSession extends EventEmitter {\n protected _realtimeModel: RealtimeModel;\n private deferredInputStream = new DeferredReadableStream<AudioFrame>();\n private _mainTask: Task<void>;\n\n constructor(realtimeModel: RealtimeModel) {\n super();\n this._realtimeModel = realtimeModel;\n this._mainTask = Task.from((controller) => this._mainTaskImpl(controller.signal));\n }\n\n get realtimeModel() {\n return this._realtimeModel;\n }\n\n abstract get chatCtx(): ChatContext;\n\n abstract get tools(): ToolContext;\n\n abstract updateInstructions(instructions: string): Promise<void>;\n\n /**\n * @throws RealtimeError on Timeout\n */\n abstract updateChatCtx(chatCtx: ChatContext): Promise<void>;\n\n abstract updateTools(tools: ToolContext): Promise<void>;\n\n abstract updateOptions(options: { toolChoice?: ToolChoice | null }): void;\n\n abstract pushAudio(frame: AudioFrame): void;\n\n /**\n * @throws RealtimeError on Timeout\n */\n abstract generateReply(instructions?: string): Promise<GenerationCreatedEvent>;\n\n /**\n * Commit the input audio buffer to the server\n */\n abstract commitAudio(): Promise<void>;\n\n /**\n * Clear the input audio buffer to the server\n */\n abstract clearAudio(): Promise<void>;\n\n /**\n * Cancel the current generation (do nothing if no generation is in progress)\n */\n abstract interrupt(): Promise<void>;\n\n /**\n * Truncate the message at the given audio end time\n */\n abstract truncate(options: {\n 
messageId: string;\n audioEndMs: number;\n modalities?: ('text' | 'audio')[];\n audioTranscript?: string;\n }): Promise<void>;\n\n async close(): Promise<void> {\n this._mainTask.cancel();\n }\n\n /**\n * Notifies the model that user activity has started\n */\n startUserActivity(): void {\n return;\n }\n\n private async _mainTaskImpl(signal: AbortSignal): Promise<void> {\n const reader = this.deferredInputStream.stream.getReader();\n while (true) {\n const { done, value } = await reader.read();\n if (done || signal.aborted) {\n break;\n }\n this.pushAudio(value);\n }\n }\n\n setInputAudioStream(audioStream: ReadableStream<AudioFrame>): void {\n this.deferredInputStream.setSource(audioStream);\n }\n}\n"],"mappings":"AAIA,SAAS,oBAAoB;AAE7B,SAAS,8BAA8B;AACvC,SAAS,YAAY;AAiDd,MAAe,cAAc;AAAA,EAC1B;AAAA,EAER,YAAY,cAAoC;AAC9C,SAAK,gBAAgB;AAAA,EACvB;AAAA,EAEA,IAAI,eAAe;AACjB,WAAO,KAAK;AAAA,EACd;AAQF;AAEO,MAAe,wBAAwB,aAAa;AAAA,EAC/C;AAAA,EACF,sBAAsB,IAAI,uBAAmC;AAAA,EAC7D;AAAA,EAER,YAAY,eAA8B;AACxC,UAAM;AACN,SAAK,iBAAiB;AACtB,SAAK,YAAY,KAAK,KAAK,CAAC,eAAe,KAAK,cAAc,WAAW,MAAM,CAAC;AAAA,EAClF;AAAA,EAEA,IAAI,gBAAgB;AAClB,WAAO,KAAK;AAAA,EACd;AAAA,EAiDA,MAAM,QAAuB;AAC3B,SAAK,UAAU,OAAO;AAAA,EACxB;AAAA;AAAA;AAAA;AAAA,EAKA,oBAA0B;AACxB;AAAA,EACF;AAAA,EAEA,MAAc,cAAc,QAAoC;AAC9D,UAAM,SAAS,KAAK,oBAAoB,OAAO,UAAU;AACzD,WAAO,MAAM;AACX,YAAM,EAAE,MAAM,MAAM,IAAI,MAAM,OAAO,KAAK;AAC1C,UAAI,QAAQ,OAAO,SAAS;AAC1B;AAAA,MACF;AACA,WAAK,UAAU,KAAK;AAAA,IACtB;AAAA,EACF;AAAA,EAEA,oBAAoB,aAA+C;AACjE,SAAK,oBAAoB,UAAU,WAAW;AAAA,EAChD;AACF;","names":[]}
+ {"version":3,"sources":["../../src/llm/realtime.ts"],"sourcesContent":["// SPDX-FileCopyrightText: 2025 LiveKit, Inc.\n//\n// SPDX-License-Identifier: Apache-2.0\nimport type { AudioFrame } from '@livekit/rtc-node';\nimport { EventEmitter } from 'events';\nimport type { ReadableStream } from 'node:stream/web';\nimport { DeferredReadableStream } from '../stream/deferred_stream.js';\nimport { Task } from '../utils.js';\nimport type { TimedString } from '../voice/io.js';\nimport type { ChatContext, FunctionCall } from './chat_context.js';\nimport type { ToolChoice, ToolContext } from './tool_context.js';\n\nexport type InputSpeechStartedEvent = object;\n\nexport interface InputSpeechStoppedEvent {\n userTranscriptionEnabled: boolean;\n}\n\nexport interface MessageGeneration {\n messageId: string;\n /**\n * Text stream that may contain plain strings or TimedString objects with timestamps.\n */\n textStream: ReadableStream<string | TimedString>;\n audioStream: ReadableStream<AudioFrame>;\n modalities?: Promise<('text' | 'audio')[]>;\n}\n\nexport interface GenerationCreatedEvent {\n messageStream: ReadableStream<MessageGeneration>;\n functionStream: ReadableStream<FunctionCall>;\n userInitiated: boolean;\n /** Response ID for correlating metrics with spans */\n responseId?: string;\n}\n\nexport interface RealtimeModelError {\n type: 'realtime_model_error';\n timestamp: number;\n label: string;\n error: Error;\n recoverable: boolean;\n}\n\nexport interface RealtimeCapabilities {\n messageTruncation: boolean;\n turnDetection: boolean;\n userTranscription: boolean;\n autoToolReplyGeneration: boolean;\n audioOutput: boolean;\n}\n\nexport interface InputTranscriptionCompleted {\n itemId: string;\n transcript: string;\n isFinal: boolean;\n}\n\nexport interface RealtimeSessionReconnectedEvent {}\n\nexport abstract class RealtimeModel {\n private _capabilities: RealtimeCapabilities;\n\n constructor(capabilities: RealtimeCapabilities) {\n this._capabilities = capabilities;\n }\n\n get capabilities() {\n return this._capabilities;\n }\n\n /** The model name/identifier used by this realtime model */\n abstract get model(): string;\n\n abstract session(): RealtimeSession;\n\n abstract close(): Promise<void>;\n}\n\nexport abstract class RealtimeSession extends EventEmitter {\n protected _realtimeModel: RealtimeModel;\n private deferredInputStream = new DeferredReadableStream<AudioFrame>();\n private _mainTask: Task<void>;\n\n constructor(realtimeModel: RealtimeModel) {\n super();\n this._realtimeModel = realtimeModel;\n this._mainTask = Task.from((controller) => this._mainTaskImpl(controller.signal));\n }\n\n get realtimeModel() {\n return this._realtimeModel;\n }\n\n abstract get chatCtx(): ChatContext;\n\n abstract get tools(): ToolContext;\n\n abstract updateInstructions(instructions: string): Promise<void>;\n\n /**\n * @throws RealtimeError on Timeout\n */\n abstract updateChatCtx(chatCtx: ChatContext): Promise<void>;\n\n abstract updateTools(tools: ToolContext): Promise<void>;\n\n abstract updateOptions(options: { toolChoice?: ToolChoice | null }): void;\n\n abstract pushAudio(frame: AudioFrame): void;\n\n /**\n * @throws RealtimeError on Timeout\n */\n abstract generateReply(instructions?: string): Promise<GenerationCreatedEvent>;\n\n /**\n * Commit the input audio buffer to the server\n */\n abstract commitAudio(): Promise<void>;\n\n /**\n * Clear the input audio buffer to the server\n */\n abstract clearAudio(): Promise<void>;\n\n /**\n * Cancel the current generation (do nothing if no generation 
is in progress)\n */\n abstract interrupt(): Promise<void>;\n\n /**\n * Truncate the message at the given audio end time\n */\n abstract truncate(options: {\n messageId: string;\n audioEndMs: number;\n modalities?: ('text' | 'audio')[];\n audioTranscript?: string;\n }): Promise<void>;\n\n async close(): Promise<void> {\n this._mainTask.cancel();\n }\n\n /**\n * Notifies the model that user activity has started\n */\n startUserActivity(): void {\n return;\n }\n\n private async _mainTaskImpl(signal: AbortSignal): Promise<void> {\n const reader = this.deferredInputStream.stream.getReader();\n while (true) {\n const { done, value } = await reader.read();\n if (done || signal.aborted) {\n break;\n }\n this.pushAudio(value);\n }\n }\n\n setInputAudioStream(audioStream: ReadableStream<AudioFrame>): void {\n this.deferredInputStream.setSource(audioStream);\n }\n}\n"],"mappings":"AAIA,SAAS,oBAAoB;AAE7B,SAAS,8BAA8B;AACvC,SAAS,YAAY;AAqDd,MAAe,cAAc;AAAA,EAC1B;AAAA,EAER,YAAY,cAAoC;AAC9C,SAAK,gBAAgB;AAAA,EACvB;AAAA,EAEA,IAAI,eAAe;AACjB,WAAO,KAAK;AAAA,EACd;AAQF;AAEO,MAAe,wBAAwB,aAAa;AAAA,EAC/C;AAAA,EACF,sBAAsB,IAAI,uBAAmC;AAAA,EAC7D;AAAA,EAER,YAAY,eAA8B;AACxC,UAAM;AACN,SAAK,iBAAiB;AACtB,SAAK,YAAY,KAAK,KAAK,CAAC,eAAe,KAAK,cAAc,WAAW,MAAM,CAAC;AAAA,EAClF;AAAA,EAEA,IAAI,gBAAgB;AAClB,WAAO,KAAK;AAAA,EACd;AAAA,EAiDA,MAAM,QAAuB;AAC3B,SAAK,UAAU,OAAO;AAAA,EACxB;AAAA;AAAA;AAAA;AAAA,EAKA,oBAA0B;AACxB;AAAA,EACF;AAAA,EAEA,MAAc,cAAc,QAAoC;AAC9D,UAAM,SAAS,KAAK,oBAAoB,OAAO,UAAU;AACzD,WAAO,MAAM;AACX,YAAM,EAAE,MAAM,MAAM,IAAI,MAAM,OAAO,KAAK;AAC1C,UAAI,QAAQ,OAAO,SAAS;AAC1B;AAAA,MACF;AACA,WAAK,UAAU,KAAK;AAAA,IACtB;AAAA,EACF;AAAA,EAEA,oBAAoB,aAA+C;AACjE,SAAK,oBAAoB,UAAU,WAAW;AAAA,EAChD;AACF;","names":[]}
@@ -22,14 +22,16 @@ __export(stream_adapter_exports, {
  StreamAdapterWrapper: () => StreamAdapterWrapper
  });
  module.exports = __toCommonJS(stream_adapter_exports);
+ var import_types = require("../types.cjs");
  var import_utils = require("../utils.cjs");
+ var import_io = require("../voice/io.cjs");
  var import_tts = require("./tts.cjs");
  class StreamAdapter extends import_tts.TTS {
  #tts;
  #sentenceTokenizer;
  label;
  constructor(tts, sentenceTokenizer) {
- super(tts.sampleRate, tts.numChannels, { streaming: true });
+ super(tts.sampleRate, tts.numChannels, { streaming: true, alignedTranscript: true });
  this.#tts = tts;
  this.#sentenceTokenizer = sentenceTokenizer;
  this.label = this.#tts.label;
@@ -59,6 +61,7 @@ class StreamAdapterWrapper extends import_tts.SynthesizeStream {
  this.label = `tts.StreamAdapterWrapper<${this.#tts.label}>`;
  }
  async run() {
+ let cumulativeDuration = 0;
  const forwardInput = async () => {
  for await (const input of this.input) {
  if (this.abortController.signal.aborted) break;
@@ -89,8 +92,19 @@ class StreamAdapterWrapper extends import_tts.SynthesizeStream {
  const audioStream = this.#tts.synthesize(token, this.connOptions, this.abortSignal);
  await (prevTask == null ? void 0 : prevTask.result);
  if (controller.signal.aborted) return;
+ const timedString = (0, import_io.createTimedString)({
+ text: token,
+ startTime: cumulativeDuration
+ });
+ let isFirstFrame = true;
  for await (const audio of audioStream) {
  if (controller.signal.aborted) break;
+ if (isFirstFrame) {
+ audio.frame.userdata[import_types.USERDATA_TIMED_TRANSCRIPT] = [timedString];
+ isFirstFrame = false;
+ }
+ const frameDuration = audio.frame.samplesPerChannel / audio.frame.sampleRate;
+ cumulativeDuration += frameDuration;
  this.queue.put(audio);
  }
  };
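The CJS hunks above add a running `cumulativeDuration` so that the TimedString attached to the first frame of each sentence records the offset, in seconds, at which that sentence's audio starts; each frame contributes `samplesPerChannel / sampleRate` seconds. A tiny self-contained sketch of that arithmetic (frame sizes and sample rate are made up for illustration):

```ts
// Illustrative only: 480-sample frames at 24 kHz are an example, not a package default.
interface FrameLike {
  samplesPerChannel: number;
  sampleRate: number;
}

function accumulateDuration(frames: FrameLike[]): number {
  let cumulativeDuration = 0; // seconds
  for (const frame of frames) {
    cumulativeDuration += frame.samplesPerChannel / frame.sampleRate;
  }
  return cumulativeDuration;
}

const frames: FrameLike[] = Array.from({ length: 10 }, () => ({
  samplesPerChannel: 480,
  sampleRate: 24000,
}));
// Ten 20 ms frames -> roughly 0.2 s (subject to floating-point rounding).
console.log(accumulateDuration(frames));
```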
@@ -1 +1 @@
- {"version":3,"sources":["../../src/tts/stream_adapter.ts"],"sourcesContent":["// SPDX-FileCopyrightText: 2024 LiveKit, Inc.\n//\n// SPDX-License-Identifier: Apache-2.0\nimport type { SentenceStream, SentenceTokenizer } from '../tokenize/index.js';\nimport type { APIConnectOptions } from '../types.js';\nimport { Task } from '../utils.js';\nimport type { ChunkedStream } from './tts.js';\nimport { SynthesizeStream, TTS } from './tts.js';\n\nexport class StreamAdapter extends TTS {\n #tts: TTS;\n #sentenceTokenizer: SentenceTokenizer;\n label: string;\n\n constructor(tts: TTS, sentenceTokenizer: SentenceTokenizer) {\n super(tts.sampleRate, tts.numChannels, { streaming: true });\n this.#tts = tts;\n this.#sentenceTokenizer = sentenceTokenizer;\n this.label = this.#tts.label;\n this.label = `tts.StreamAdapter<${this.#tts.label}>`;\n\n this.#tts.on('metrics_collected', (metrics) => {\n this.emit('metrics_collected', metrics);\n });\n this.#tts.on('error', (error) => {\n this.emit('error', error);\n });\n }\n\n synthesize(\n text: string,\n connOptions?: APIConnectOptions,\n abortSignal?: AbortSignal,\n ): ChunkedStream {\n return this.#tts.synthesize(text, connOptions, abortSignal);\n }\n\n stream(options?: { connOptions?: APIConnectOptions }): StreamAdapterWrapper {\n return new StreamAdapterWrapper(this.#tts, this.#sentenceTokenizer, options?.connOptions);\n }\n}\n\nexport class StreamAdapterWrapper extends SynthesizeStream {\n #tts: TTS;\n #sentenceStream: SentenceStream;\n label: string;\n\n constructor(tts: TTS, sentenceTokenizer: SentenceTokenizer, connOptions?: APIConnectOptions) {\n super(tts, connOptions);\n this.#tts = tts;\n this.#sentenceStream = sentenceTokenizer.stream();\n this.label = `tts.StreamAdapterWrapper<${this.#tts.label}>`;\n }\n\n protected async run() {\n const forwardInput = async () => {\n for await (const input of this.input) {\n if (this.abortController.signal.aborted) break;\n\n if (input === SynthesizeStream.FLUSH_SENTINEL) {\n this.#sentenceStream.flush();\n } else {\n this.#sentenceStream.pushText(input);\n }\n }\n this.#sentenceStream.endInput();\n this.#sentenceStream.close();\n };\n\n const synthesizeSentenceStream = async () => {\n let task: Task<void> | undefined;\n const tokenCompletionTasks: Task<void>[] = [];\n\n for await (const ev of this.#sentenceStream) {\n if (this.abortController.signal.aborted) break;\n\n // this will enable non-blocking synthesis of the stream of tokens\n task = Task.from(\n (controller) => synthesize(ev.token, task, controller),\n this.abortController,\n );\n\n tokenCompletionTasks.push(task);\n }\n\n await Promise.all(tokenCompletionTasks.map((t) => t.result));\n this.queue.put(SynthesizeStream.END_OF_STREAM);\n };\n\n const synthesize = async (\n token: string,\n prevTask: Task<void> | undefined,\n controller: AbortController,\n ) => {\n const audioStream = this.#tts.synthesize(token, this.connOptions, this.abortSignal);\n\n // wait for previous audio transcription to complete before starting\n // to queuing audio frames of the current token\n await prevTask?.result;\n if (controller.signal.aborted) return;\n\n for await (const audio of audioStream) {\n if (controller.signal.aborted) break;\n this.queue.put(audio);\n }\n };\n\n await Promise.all([forwardInput(), synthesizeSentenceStream()]);\n 
}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAKA,mBAAqB;AAErB,iBAAsC;AAE/B,MAAM,sBAAsB,eAAI;AAAA,EACrC;AAAA,EACA;AAAA,EACA;AAAA,EAEA,YAAY,KAAU,mBAAsC;AAC1D,UAAM,IAAI,YAAY,IAAI,aAAa,EAAE,WAAW,KAAK,CAAC;AAC1D,SAAK,OAAO;AACZ,SAAK,qBAAqB;AAC1B,SAAK,QAAQ,KAAK,KAAK;AACvB,SAAK,QAAQ,qBAAqB,KAAK,KAAK,KAAK;AAEjD,SAAK,KAAK,GAAG,qBAAqB,CAAC,YAAY;AAC7C,WAAK,KAAK,qBAAqB,OAAO;AAAA,IACxC,CAAC;AACD,SAAK,KAAK,GAAG,SAAS,CAAC,UAAU;AAC/B,WAAK,KAAK,SAAS,KAAK;AAAA,IAC1B,CAAC;AAAA,EACH;AAAA,EAEA,WACE,MACA,aACA,aACe;AACf,WAAO,KAAK,KAAK,WAAW,MAAM,aAAa,WAAW;AAAA,EAC5D;AAAA,EAEA,OAAO,SAAqE;AAC1E,WAAO,IAAI,qBAAqB,KAAK,MAAM,KAAK,oBAAoB,mCAAS,WAAW;AAAA,EAC1F;AACF;AAEO,MAAM,6BAA6B,4BAAiB;AAAA,EACzD;AAAA,EACA;AAAA,EACA;AAAA,EAEA,YAAY,KAAU,mBAAsC,aAAiC;AAC3F,UAAM,KAAK,WAAW;AACtB,SAAK,OAAO;AACZ,SAAK,kBAAkB,kBAAkB,OAAO;AAChD,SAAK,QAAQ,4BAA4B,KAAK,KAAK,KAAK;AAAA,EAC1D;AAAA,EAEA,MAAgB,MAAM;AACpB,UAAM,eAAe,YAAY;AAC/B,uBAAiB,SAAS,KAAK,OAAO;AACpC,YAAI,KAAK,gBAAgB,OAAO,QAAS;AAEzC,YAAI,UAAU,4BAAiB,gBAAgB;AAC7C,eAAK,gBAAgB,MAAM;AAAA,QAC7B,OAAO;AACL,eAAK,gBAAgB,SAAS,KAAK;AAAA,QACrC;AAAA,MACF;AACA,WAAK,gBAAgB,SAAS;AAC9B,WAAK,gBAAgB,MAAM;AAAA,IAC7B;AAEA,UAAM,2BAA2B,YAAY;AAC3C,UAAI;AACJ,YAAM,uBAAqC,CAAC;AAE5C,uBAAiB,MAAM,KAAK,iBAAiB;AAC3C,YAAI,KAAK,gBAAgB,OAAO,QAAS;AAGzC,eAAO,kBAAK;AAAA,UACV,CAAC,eAAe,WAAW,GAAG,OAAO,MAAM,UAAU;AAAA,UACrD,KAAK;AAAA,QACP;AAEA,6BAAqB,KAAK,IAAI;AAAA,MAChC;AAEA,YAAM,QAAQ,IAAI,qBAAqB,IAAI,CAAC,MAAM,EAAE,MAAM,CAAC;AAC3D,WAAK,MAAM,IAAI,4BAAiB,aAAa;AAAA,IAC/C;AAEA,UAAM,aAAa,OACjB,OACA,UACA,eACG;AACH,YAAM,cAAc,KAAK,KAAK,WAAW,OAAO,KAAK,aAAa,KAAK,WAAW;AAIlF,aAAM,qCAAU;AAChB,UAAI,WAAW,OAAO,QAAS;AAE/B,uBAAiB,SAAS,aAAa;AACrC,YAAI,WAAW,OAAO,QAAS;AAC/B,aAAK,MAAM,IAAI,KAAK;AAAA,MACtB;AAAA,IACF;AAEA,UAAM,QAAQ,IAAI,CAAC,aAAa,GAAG,yBAAyB,CAAC,CAAC;AAAA,EAChE;AACF;","names":[]}
+ {"version":3,"sources":["../../src/tts/stream_adapter.ts"],"sourcesContent":["// SPDX-FileCopyrightText: 2024 LiveKit, Inc.\n//\n// SPDX-License-Identifier: Apache-2.0\nimport type { SentenceStream, SentenceTokenizer } from '../tokenize/index.js';\nimport type { APIConnectOptions } from '../types.js';\nimport { USERDATA_TIMED_TRANSCRIPT } from '../types.js';\nimport { Task } from '../utils.js';\nimport { createTimedString } from '../voice/io.js';\nimport type { ChunkedStream } from './tts.js';\nimport { SynthesizeStream, TTS } from './tts.js';\n\nexport class StreamAdapter extends TTS {\n #tts: TTS;\n #sentenceTokenizer: SentenceTokenizer;\n label: string;\n\n constructor(tts: TTS, sentenceTokenizer: SentenceTokenizer) {\n super(tts.sampleRate, tts.numChannels, { streaming: true, alignedTranscript: true });\n this.#tts = tts;\n this.#sentenceTokenizer = sentenceTokenizer;\n this.label = this.#tts.label;\n this.label = `tts.StreamAdapter<${this.#tts.label}>`;\n\n this.#tts.on('metrics_collected', (metrics) => {\n this.emit('metrics_collected', metrics);\n });\n this.#tts.on('error', (error) => {\n this.emit('error', error);\n });\n }\n\n synthesize(\n text: string,\n connOptions?: APIConnectOptions,\n abortSignal?: AbortSignal,\n ): ChunkedStream {\n return this.#tts.synthesize(text, connOptions, abortSignal);\n }\n\n stream(options?: { connOptions?: APIConnectOptions }): StreamAdapterWrapper {\n return new StreamAdapterWrapper(this.#tts, this.#sentenceTokenizer, options?.connOptions);\n }\n}\n\nexport class StreamAdapterWrapper extends SynthesizeStream {\n #tts: TTS;\n #sentenceStream: SentenceStream;\n label: string;\n\n constructor(tts: TTS, sentenceTokenizer: SentenceTokenizer, connOptions?: APIConnectOptions) {\n super(tts, connOptions);\n this.#tts = tts;\n this.#sentenceStream = sentenceTokenizer.stream();\n this.label = `tts.StreamAdapterWrapper<${this.#tts.label}>`;\n }\n\n protected async run() {\n let cumulativeDuration = 0;\n\n const forwardInput = async () => {\n for await (const input of this.input) {\n if (this.abortController.signal.aborted) break;\n\n if (input === SynthesizeStream.FLUSH_SENTINEL) {\n this.#sentenceStream.flush();\n } else {\n this.#sentenceStream.pushText(input);\n }\n }\n this.#sentenceStream.endInput();\n this.#sentenceStream.close();\n };\n\n const synthesizeSentenceStream = async () => {\n let task: Task<void> | undefined;\n const tokenCompletionTasks: Task<void>[] = [];\n\n for await (const ev of this.#sentenceStream) {\n if (this.abortController.signal.aborted) break;\n\n // this will enable non-blocking synthesis of the stream of tokens\n task = Task.from(\n (controller) => synthesize(ev.token, task, controller),\n this.abortController,\n );\n\n tokenCompletionTasks.push(task);\n }\n\n await Promise.all(tokenCompletionTasks.map((t) => t.result));\n this.queue.put(SynthesizeStream.END_OF_STREAM);\n };\n\n const synthesize = async (\n token: string,\n prevTask: Task<void> | undefined,\n controller: AbortController,\n ) => {\n const audioStream = this.#tts.synthesize(token, this.connOptions, this.abortSignal);\n\n // wait for previous audio transcription to complete before starting\n // to queuing audio frames of the current token\n await prevTask?.result;\n if (controller.signal.aborted) return;\n\n // Create a TimedString with the sentence text and current cumulative duration\n const timedString = createTimedString({\n text: token,\n startTime: cumulativeDuration,\n });\n\n let isFirstFrame = true;\n for await (const audio of audioStream) {\n if 
(controller.signal.aborted) break;\n\n // Attach the TimedString to the first frame of this sentence\n if (isFirstFrame) {\n audio.frame.userdata[USERDATA_TIMED_TRANSCRIPT] = [timedString];\n isFirstFrame = false;\n }\n\n // Track cumulative duration\n const frameDuration = audio.frame.samplesPerChannel / audio.frame.sampleRate;\n cumulativeDuration += frameDuration;\n\n this.queue.put(audio);\n }\n };\n\n await Promise.all([forwardInput(), synthesizeSentenceStream()]);\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAKA,mBAA0C;AAC1C,mBAAqB;AACrB,gBAAkC;AAElC,iBAAsC;AAE/B,MAAM,sBAAsB,eAAI;AAAA,EACrC;AAAA,EACA;AAAA,EACA;AAAA,EAEA,YAAY,KAAU,mBAAsC;AAC1D,UAAM,IAAI,YAAY,IAAI,aAAa,EAAE,WAAW,MAAM,mBAAmB,KAAK,CAAC;AACnF,SAAK,OAAO;AACZ,SAAK,qBAAqB;AAC1B,SAAK,QAAQ,KAAK,KAAK;AACvB,SAAK,QAAQ,qBAAqB,KAAK,KAAK,KAAK;AAEjD,SAAK,KAAK,GAAG,qBAAqB,CAAC,YAAY;AAC7C,WAAK,KAAK,qBAAqB,OAAO;AAAA,IACxC,CAAC;AACD,SAAK,KAAK,GAAG,SAAS,CAAC,UAAU;AAC/B,WAAK,KAAK,SAAS,KAAK;AAAA,IAC1B,CAAC;AAAA,EACH;AAAA,EAEA,WACE,MACA,aACA,aACe;AACf,WAAO,KAAK,KAAK,WAAW,MAAM,aAAa,WAAW;AAAA,EAC5D;AAAA,EAEA,OAAO,SAAqE;AAC1E,WAAO,IAAI,qBAAqB,KAAK,MAAM,KAAK,oBAAoB,mCAAS,WAAW;AAAA,EAC1F;AACF;AAEO,MAAM,6BAA6B,4BAAiB;AAAA,EACzD;AAAA,EACA;AAAA,EACA;AAAA,EAEA,YAAY,KAAU,mBAAsC,aAAiC;AAC3F,UAAM,KAAK,WAAW;AACtB,SAAK,OAAO;AACZ,SAAK,kBAAkB,kBAAkB,OAAO;AAChD,SAAK,QAAQ,4BAA4B,KAAK,KAAK,KAAK;AAAA,EAC1D;AAAA,EAEA,MAAgB,MAAM;AACpB,QAAI,qBAAqB;AAEzB,UAAM,eAAe,YAAY;AAC/B,uBAAiB,SAAS,KAAK,OAAO;AACpC,YAAI,KAAK,gBAAgB,OAAO,QAAS;AAEzC,YAAI,UAAU,4BAAiB,gBAAgB;AAC7C,eAAK,gBAAgB,MAAM;AAAA,QAC7B,OAAO;AACL,eAAK,gBAAgB,SAAS,KAAK;AAAA,QACrC;AAAA,MACF;AACA,WAAK,gBAAgB,SAAS;AAC9B,WAAK,gBAAgB,MAAM;AAAA,IAC7B;AAEA,UAAM,2BAA2B,YAAY;AAC3C,UAAI;AACJ,YAAM,uBAAqC,CAAC;AAE5C,uBAAiB,MAAM,KAAK,iBAAiB;AAC3C,YAAI,KAAK,gBAAgB,OAAO,QAAS;AAGzC,eAAO,kBAAK;AAAA,UACV,CAAC,eAAe,WAAW,GAAG,OAAO,MAAM,UAAU;AAAA,UACrD,KAAK;AAAA,QACP;AAEA,6BAAqB,KAAK,IAAI;AAAA,MAChC;AAEA,YAAM,QAAQ,IAAI,qBAAqB,IAAI,CAAC,MAAM,EAAE,MAAM,CAAC;AAC3D,WAAK,MAAM,IAAI,4BAAiB,aAAa;AAAA,IAC/C;AAEA,UAAM,aAAa,OACjB,OACA,UACA,eACG;AACH,YAAM,cAAc,KAAK,KAAK,WAAW,OAAO,KAAK,aAAa,KAAK,WAAW;AAIlF,aAAM,qCAAU;AAChB,UAAI,WAAW,OAAO,QAAS;AAG/B,YAAM,kBAAc,6BAAkB;AAAA,QACpC,MAAM;AAAA,QACN,WAAW;AAAA,MACb,CAAC;AAED,UAAI,eAAe;AACnB,uBAAiB,SAAS,aAAa;AACrC,YAAI,WAAW,OAAO,QAAS;AAG/B,YAAI,cAAc;AAChB,gBAAM,MAAM,SAAS,sCAAyB,IAAI,CAAC,WAAW;AAC9D,yBAAe;AAAA,QACjB;AAGA,cAAM,gBAAgB,MAAM,MAAM,oBAAoB,MAAM,MAAM;AAClE,8BAAsB;AAEtB,aAAK,MAAM,IAAI,KAAK;AAAA,MACtB;AAAA,IACF;AAEA,UAAM,QAAQ,IAAI,CAAC,aAAa,GAAG,yBAAyB,CAAC,CAAC;AAAA,EAChE;AACF;","names":[]}
@@ -1 +1 @@
- {"version":3,"file":"stream_adapter.d.ts","sourceRoot":"","sources":["../../src/tts/stream_adapter.ts"],"names":[],"mappings":"AAGA,OAAO,KAAK,EAAkB,iBAAiB,EAAE,MAAM,sBAAsB,CAAC;AAC9E,OAAO,KAAK,EAAE,iBAAiB,EAAE,MAAM,aAAa,CAAC;AAErD,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,UAAU,CAAC;AAC9C,OAAO,EAAE,gBAAgB,EAAE,GAAG,EAAE,MAAM,UAAU,CAAC;AAEjD,qBAAa,aAAc,SAAQ,GAAG;;IAGpC,KAAK,EAAE,MAAM,CAAC;gBAEF,GAAG,EAAE,GAAG,EAAE,iBAAiB,EAAE,iBAAiB;IAe1D,UAAU,CACR,IAAI,EAAE,MAAM,EACZ,WAAW,CAAC,EAAE,iBAAiB,EAC/B,WAAW,CAAC,EAAE,WAAW,GACxB,aAAa;IAIhB,MAAM,CAAC,OAAO,CAAC,EAAE;QAAE,WAAW,CAAC,EAAE,iBAAiB,CAAA;KAAE,GAAG,oBAAoB;CAG5E;AAED,qBAAa,oBAAqB,SAAQ,gBAAgB;;IAGxD,KAAK,EAAE,MAAM,CAAC;gBAEF,GAAG,EAAE,GAAG,EAAE,iBAAiB,EAAE,iBAAiB,EAAE,WAAW,CAAC,EAAE,iBAAiB;cAO3E,GAAG;CAuDpB"}
+ {"version":3,"file":"stream_adapter.d.ts","sourceRoot":"","sources":["../../src/tts/stream_adapter.ts"],"names":[],"mappings":"AAGA,OAAO,KAAK,EAAkB,iBAAiB,EAAE,MAAM,sBAAsB,CAAC;AAC9E,OAAO,KAAK,EAAE,iBAAiB,EAAE,MAAM,aAAa,CAAC;AAIrD,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,UAAU,CAAC;AAC9C,OAAO,EAAE,gBAAgB,EAAE,GAAG,EAAE,MAAM,UAAU,CAAC;AAEjD,qBAAa,aAAc,SAAQ,GAAG;;IAGpC,KAAK,EAAE,MAAM,CAAC;gBAEF,GAAG,EAAE,GAAG,EAAE,iBAAiB,EAAE,iBAAiB;IAe1D,UAAU,CACR,IAAI,EAAE,MAAM,EACZ,WAAW,CAAC,EAAE,iBAAiB,EAC/B,WAAW,CAAC,EAAE,WAAW,GACxB,aAAa;IAIhB,MAAM,CAAC,OAAO,CAAC,EAAE;QAAE,WAAW,CAAC,EAAE,iBAAiB,CAAA;KAAE,GAAG,oBAAoB;CAG5E;AAED,qBAAa,oBAAqB,SAAQ,gBAAgB;;IAGxD,KAAK,EAAE,MAAM,CAAC;gBAEF,GAAG,EAAE,GAAG,EAAE,iBAAiB,EAAE,iBAAiB,EAAE,WAAW,CAAC,EAAE,iBAAiB;cAO3E,GAAG;CA2EpB"}
@@ -1,11 +1,13 @@
+ import { USERDATA_TIMED_TRANSCRIPT } from "../types.js";
  import { Task } from "../utils.js";
+ import { createTimedString } from "../voice/io.js";
  import { SynthesizeStream, TTS } from "./tts.js";
  class StreamAdapter extends TTS {
  #tts;
  #sentenceTokenizer;
  label;
  constructor(tts, sentenceTokenizer) {
- super(tts.sampleRate, tts.numChannels, { streaming: true });
+ super(tts.sampleRate, tts.numChannels, { streaming: true, alignedTranscript: true });
  this.#tts = tts;
  this.#sentenceTokenizer = sentenceTokenizer;
  this.label = this.#tts.label;
@@ -35,6 +37,7 @@ class StreamAdapterWrapper extends SynthesizeStream {
  this.label = `tts.StreamAdapterWrapper<${this.#tts.label}>`;
  }
  async run() {
+ let cumulativeDuration = 0;
  const forwardInput = async () => {
  for await (const input of this.input) {
  if (this.abortController.signal.aborted) break;
@@ -65,8 +68,19 @@ class StreamAdapterWrapper extends SynthesizeStream {
  const audioStream = this.#tts.synthesize(token, this.connOptions, this.abortSignal);
  await (prevTask == null ? void 0 : prevTask.result);
  if (controller.signal.aborted) return;
+ const timedString = createTimedString({
+ text: token,
+ startTime: cumulativeDuration
+ });
+ let isFirstFrame = true;
  for await (const audio of audioStream) {
  if (controller.signal.aborted) break;
+ if (isFirstFrame) {
+ audio.frame.userdata[USERDATA_TIMED_TRANSCRIPT] = [timedString];
+ isFirstFrame = false;
+ }
+ const frameDuration = audio.frame.samplesPerChannel / audio.frame.sampleRate;
+ cumulativeDuration += frameDuration;
  this.queue.put(audio);
  }
  };
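The same change applied to the ESM build. Because `StreamAdapter` now passes `alignedTranscript: true` alongside `streaming: true` to the TTS base class, callers can in principle check the capability flag before relying on per-sentence timing. The sketch below uses a local `CapabilitiesLike` stand-in; the real `TTSCapabilities` change lives in dist/tts/tts.d.ts and is only summarized in this diff.

```ts
// Stand-in shape for illustration; not the package's actual TTSCapabilities type.
interface CapabilitiesLike {
  streaming: boolean;
  alignedTranscript?: boolean;
}

function describeCapabilities(caps: CapabilitiesLike): string {
  return caps.alignedTranscript
    ? 'frames may carry a timed transcript entry in frame.userdata'
    : 'audio only; no per-sentence timing attached to frames';
}

// What a StreamAdapter built against this release would report:
console.log(describeCapabilities({ streaming: true, alignedTranscript: true }));
```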
@@ -1 +1 @@
- {"version":3,"sources":["../../src/tts/stream_adapter.ts"],"sourcesContent":["// SPDX-FileCopyrightText: 2024 LiveKit, Inc.\n//\n// SPDX-License-Identifier: Apache-2.0\nimport type { SentenceStream, SentenceTokenizer } from '../tokenize/index.js';\nimport type { APIConnectOptions } from '../types.js';\nimport { Task } from '../utils.js';\nimport type { ChunkedStream } from './tts.js';\nimport { SynthesizeStream, TTS } from './tts.js';\n\nexport class StreamAdapter extends TTS {\n #tts: TTS;\n #sentenceTokenizer: SentenceTokenizer;\n label: string;\n\n constructor(tts: TTS, sentenceTokenizer: SentenceTokenizer) {\n super(tts.sampleRate, tts.numChannels, { streaming: true });\n this.#tts = tts;\n this.#sentenceTokenizer = sentenceTokenizer;\n this.label = this.#tts.label;\n this.label = `tts.StreamAdapter<${this.#tts.label}>`;\n\n this.#tts.on('metrics_collected', (metrics) => {\n this.emit('metrics_collected', metrics);\n });\n this.#tts.on('error', (error) => {\n this.emit('error', error);\n });\n }\n\n synthesize(\n text: string,\n connOptions?: APIConnectOptions,\n abortSignal?: AbortSignal,\n ): ChunkedStream {\n return this.#tts.synthesize(text, connOptions, abortSignal);\n }\n\n stream(options?: { connOptions?: APIConnectOptions }): StreamAdapterWrapper {\n return new StreamAdapterWrapper(this.#tts, this.#sentenceTokenizer, options?.connOptions);\n }\n}\n\nexport class StreamAdapterWrapper extends SynthesizeStream {\n #tts: TTS;\n #sentenceStream: SentenceStream;\n label: string;\n\n constructor(tts: TTS, sentenceTokenizer: SentenceTokenizer, connOptions?: APIConnectOptions) {\n super(tts, connOptions);\n this.#tts = tts;\n this.#sentenceStream = sentenceTokenizer.stream();\n this.label = `tts.StreamAdapterWrapper<${this.#tts.label}>`;\n }\n\n protected async run() {\n const forwardInput = async () => {\n for await (const input of this.input) {\n if (this.abortController.signal.aborted) break;\n\n if (input === SynthesizeStream.FLUSH_SENTINEL) {\n this.#sentenceStream.flush();\n } else {\n this.#sentenceStream.pushText(input);\n }\n }\n this.#sentenceStream.endInput();\n this.#sentenceStream.close();\n };\n\n const synthesizeSentenceStream = async () => {\n let task: Task<void> | undefined;\n const tokenCompletionTasks: Task<void>[] = [];\n\n for await (const ev of this.#sentenceStream) {\n if (this.abortController.signal.aborted) break;\n\n // this will enable non-blocking synthesis of the stream of tokens\n task = Task.from(\n (controller) => synthesize(ev.token, task, controller),\n this.abortController,\n );\n\n tokenCompletionTasks.push(task);\n }\n\n await Promise.all(tokenCompletionTasks.map((t) => t.result));\n this.queue.put(SynthesizeStream.END_OF_STREAM);\n };\n\n const synthesize = async (\n token: string,\n prevTask: Task<void> | undefined,\n controller: AbortController,\n ) => {\n const audioStream = this.#tts.synthesize(token, this.connOptions, this.abortSignal);\n\n // wait for previous audio transcription to complete before starting\n // to queuing audio frames of the current token\n await prevTask?.result;\n if (controller.signal.aborted) return;\n\n for await (const audio of audioStream) {\n if (controller.signal.aborted) break;\n this.queue.put(audio);\n }\n };\n\n await Promise.all([forwardInput(), synthesizeSentenceStream()]);\n 
}\n}\n"],"mappings":"AAKA,SAAS,YAAY;AAErB,SAAS,kBAAkB,WAAW;AAE/B,MAAM,sBAAsB,IAAI;AAAA,EACrC;AAAA,EACA;AAAA,EACA;AAAA,EAEA,YAAY,KAAU,mBAAsC;AAC1D,UAAM,IAAI,YAAY,IAAI,aAAa,EAAE,WAAW,KAAK,CAAC;AAC1D,SAAK,OAAO;AACZ,SAAK,qBAAqB;AAC1B,SAAK,QAAQ,KAAK,KAAK;AACvB,SAAK,QAAQ,qBAAqB,KAAK,KAAK,KAAK;AAEjD,SAAK,KAAK,GAAG,qBAAqB,CAAC,YAAY;AAC7C,WAAK,KAAK,qBAAqB,OAAO;AAAA,IACxC,CAAC;AACD,SAAK,KAAK,GAAG,SAAS,CAAC,UAAU;AAC/B,WAAK,KAAK,SAAS,KAAK;AAAA,IAC1B,CAAC;AAAA,EACH;AAAA,EAEA,WACE,MACA,aACA,aACe;AACf,WAAO,KAAK,KAAK,WAAW,MAAM,aAAa,WAAW;AAAA,EAC5D;AAAA,EAEA,OAAO,SAAqE;AAC1E,WAAO,IAAI,qBAAqB,KAAK,MAAM,KAAK,oBAAoB,mCAAS,WAAW;AAAA,EAC1F;AACF;AAEO,MAAM,6BAA6B,iBAAiB;AAAA,EACzD;AAAA,EACA;AAAA,EACA;AAAA,EAEA,YAAY,KAAU,mBAAsC,aAAiC;AAC3F,UAAM,KAAK,WAAW;AACtB,SAAK,OAAO;AACZ,SAAK,kBAAkB,kBAAkB,OAAO;AAChD,SAAK,QAAQ,4BAA4B,KAAK,KAAK,KAAK;AAAA,EAC1D;AAAA,EAEA,MAAgB,MAAM;AACpB,UAAM,eAAe,YAAY;AAC/B,uBAAiB,SAAS,KAAK,OAAO;AACpC,YAAI,KAAK,gBAAgB,OAAO,QAAS;AAEzC,YAAI,UAAU,iBAAiB,gBAAgB;AAC7C,eAAK,gBAAgB,MAAM;AAAA,QAC7B,OAAO;AACL,eAAK,gBAAgB,SAAS,KAAK;AAAA,QACrC;AAAA,MACF;AACA,WAAK,gBAAgB,SAAS;AAC9B,WAAK,gBAAgB,MAAM;AAAA,IAC7B;AAEA,UAAM,2BAA2B,YAAY;AAC3C,UAAI;AACJ,YAAM,uBAAqC,CAAC;AAE5C,uBAAiB,MAAM,KAAK,iBAAiB;AAC3C,YAAI,KAAK,gBAAgB,OAAO,QAAS;AAGzC,eAAO,KAAK;AAAA,UACV,CAAC,eAAe,WAAW,GAAG,OAAO,MAAM,UAAU;AAAA,UACrD,KAAK;AAAA,QACP;AAEA,6BAAqB,KAAK,IAAI;AAAA,MAChC;AAEA,YAAM,QAAQ,IAAI,qBAAqB,IAAI,CAAC,MAAM,EAAE,MAAM,CAAC;AAC3D,WAAK,MAAM,IAAI,iBAAiB,aAAa;AAAA,IAC/C;AAEA,UAAM,aAAa,OACjB,OACA,UACA,eACG;AACH,YAAM,cAAc,KAAK,KAAK,WAAW,OAAO,KAAK,aAAa,KAAK,WAAW;AAIlF,aAAM,qCAAU;AAChB,UAAI,WAAW,OAAO,QAAS;AAE/B,uBAAiB,SAAS,aAAa;AACrC,YAAI,WAAW,OAAO,QAAS;AAC/B,aAAK,MAAM,IAAI,KAAK;AAAA,MACtB;AAAA,IACF;AAEA,UAAM,QAAQ,IAAI,CAAC,aAAa,GAAG,yBAAyB,CAAC,CAAC;AAAA,EAChE;AACF;","names":[]}
+ {"version":3,"sources":["../../src/tts/stream_adapter.ts"],"sourcesContent":["// SPDX-FileCopyrightText: 2024 LiveKit, Inc.\n//\n// SPDX-License-Identifier: Apache-2.0\nimport type { SentenceStream, SentenceTokenizer } from '../tokenize/index.js';\nimport type { APIConnectOptions } from '../types.js';\nimport { USERDATA_TIMED_TRANSCRIPT } from '../types.js';\nimport { Task } from '../utils.js';\nimport { createTimedString } from '../voice/io.js';\nimport type { ChunkedStream } from './tts.js';\nimport { SynthesizeStream, TTS } from './tts.js';\n\nexport class StreamAdapter extends TTS {\n #tts: TTS;\n #sentenceTokenizer: SentenceTokenizer;\n label: string;\n\n constructor(tts: TTS, sentenceTokenizer: SentenceTokenizer) {\n super(tts.sampleRate, tts.numChannels, { streaming: true, alignedTranscript: true });\n this.#tts = tts;\n this.#sentenceTokenizer = sentenceTokenizer;\n this.label = this.#tts.label;\n this.label = `tts.StreamAdapter<${this.#tts.label}>`;\n\n this.#tts.on('metrics_collected', (metrics) => {\n this.emit('metrics_collected', metrics);\n });\n this.#tts.on('error', (error) => {\n this.emit('error', error);\n });\n }\n\n synthesize(\n text: string,\n connOptions?: APIConnectOptions,\n abortSignal?: AbortSignal,\n ): ChunkedStream {\n return this.#tts.synthesize(text, connOptions, abortSignal);\n }\n\n stream(options?: { connOptions?: APIConnectOptions }): StreamAdapterWrapper {\n return new StreamAdapterWrapper(this.#tts, this.#sentenceTokenizer, options?.connOptions);\n }\n}\n\nexport class StreamAdapterWrapper extends SynthesizeStream {\n #tts: TTS;\n #sentenceStream: SentenceStream;\n label: string;\n\n constructor(tts: TTS, sentenceTokenizer: SentenceTokenizer, connOptions?: APIConnectOptions) {\n super(tts, connOptions);\n this.#tts = tts;\n this.#sentenceStream = sentenceTokenizer.stream();\n this.label = `tts.StreamAdapterWrapper<${this.#tts.label}>`;\n }\n\n protected async run() {\n let cumulativeDuration = 0;\n\n const forwardInput = async () => {\n for await (const input of this.input) {\n if (this.abortController.signal.aborted) break;\n\n if (input === SynthesizeStream.FLUSH_SENTINEL) {\n this.#sentenceStream.flush();\n } else {\n this.#sentenceStream.pushText(input);\n }\n }\n this.#sentenceStream.endInput();\n this.#sentenceStream.close();\n };\n\n const synthesizeSentenceStream = async () => {\n let task: Task<void> | undefined;\n const tokenCompletionTasks: Task<void>[] = [];\n\n for await (const ev of this.#sentenceStream) {\n if (this.abortController.signal.aborted) break;\n\n // this will enable non-blocking synthesis of the stream of tokens\n task = Task.from(\n (controller) => synthesize(ev.token, task, controller),\n this.abortController,\n );\n\n tokenCompletionTasks.push(task);\n }\n\n await Promise.all(tokenCompletionTasks.map((t) => t.result));\n this.queue.put(SynthesizeStream.END_OF_STREAM);\n };\n\n const synthesize = async (\n token: string,\n prevTask: Task<void> | undefined,\n controller: AbortController,\n ) => {\n const audioStream = this.#tts.synthesize(token, this.connOptions, this.abortSignal);\n\n // wait for previous audio transcription to complete before starting\n // to queuing audio frames of the current token\n await prevTask?.result;\n if (controller.signal.aborted) return;\n\n // Create a TimedString with the sentence text and current cumulative duration\n const timedString = createTimedString({\n text: token,\n startTime: cumulativeDuration,\n });\n\n let isFirstFrame = true;\n for await (const audio of audioStream) {\n if 
(controller.signal.aborted) break;\n\n // Attach the TimedString to the first frame of this sentence\n if (isFirstFrame) {\n audio.frame.userdata[USERDATA_TIMED_TRANSCRIPT] = [timedString];\n isFirstFrame = false;\n }\n\n // Track cumulative duration\n const frameDuration = audio.frame.samplesPerChannel / audio.frame.sampleRate;\n cumulativeDuration += frameDuration;\n\n this.queue.put(audio);\n }\n };\n\n await Promise.all([forwardInput(), synthesizeSentenceStream()]);\n }\n}\n"],"mappings":"AAKA,SAAS,iCAAiC;AAC1C,SAAS,YAAY;AACrB,SAAS,yBAAyB;AAElC,SAAS,kBAAkB,WAAW;AAE/B,MAAM,sBAAsB,IAAI;AAAA,EACrC;AAAA,EACA;AAAA,EACA;AAAA,EAEA,YAAY,KAAU,mBAAsC;AAC1D,UAAM,IAAI,YAAY,IAAI,aAAa,EAAE,WAAW,MAAM,mBAAmB,KAAK,CAAC;AACnF,SAAK,OAAO;AACZ,SAAK,qBAAqB;AAC1B,SAAK,QAAQ,KAAK,KAAK;AACvB,SAAK,QAAQ,qBAAqB,KAAK,KAAK,KAAK;AAEjD,SAAK,KAAK,GAAG,qBAAqB,CAAC,YAAY;AAC7C,WAAK,KAAK,qBAAqB,OAAO;AAAA,IACxC,CAAC;AACD,SAAK,KAAK,GAAG,SAAS,CAAC,UAAU;AAC/B,WAAK,KAAK,SAAS,KAAK;AAAA,IAC1B,CAAC;AAAA,EACH;AAAA,EAEA,WACE,MACA,aACA,aACe;AACf,WAAO,KAAK,KAAK,WAAW,MAAM,aAAa,WAAW;AAAA,EAC5D;AAAA,EAEA,OAAO,SAAqE;AAC1E,WAAO,IAAI,qBAAqB,KAAK,MAAM,KAAK,oBAAoB,mCAAS,WAAW;AAAA,EAC1F;AACF;AAEO,MAAM,6BAA6B,iBAAiB;AAAA,EACzD;AAAA,EACA;AAAA,EACA;AAAA,EAEA,YAAY,KAAU,mBAAsC,aAAiC;AAC3F,UAAM,KAAK,WAAW;AACtB,SAAK,OAAO;AACZ,SAAK,kBAAkB,kBAAkB,OAAO;AAChD,SAAK,QAAQ,4BAA4B,KAAK,KAAK,KAAK;AAAA,EAC1D;AAAA,EAEA,MAAgB,MAAM;AACpB,QAAI,qBAAqB;AAEzB,UAAM,eAAe,YAAY;AAC/B,uBAAiB,SAAS,KAAK,OAAO;AACpC,YAAI,KAAK,gBAAgB,OAAO,QAAS;AAEzC,YAAI,UAAU,iBAAiB,gBAAgB;AAC7C,eAAK,gBAAgB,MAAM;AAAA,QAC7B,OAAO;AACL,eAAK,gBAAgB,SAAS,KAAK;AAAA,QACrC;AAAA,MACF;AACA,WAAK,gBAAgB,SAAS;AAC9B,WAAK,gBAAgB,MAAM;AAAA,IAC7B;AAEA,UAAM,2BAA2B,YAAY;AAC3C,UAAI;AACJ,YAAM,uBAAqC,CAAC;AAE5C,uBAAiB,MAAM,KAAK,iBAAiB;AAC3C,YAAI,KAAK,gBAAgB,OAAO,QAAS;AAGzC,eAAO,KAAK;AAAA,UACV,CAAC,eAAe,WAAW,GAAG,OAAO,MAAM,UAAU;AAAA,UACrD,KAAK;AAAA,QACP;AAEA,6BAAqB,KAAK,IAAI;AAAA,MAChC;AAEA,YAAM,QAAQ,IAAI,qBAAqB,IAAI,CAAC,MAAM,EAAE,MAAM,CAAC;AAC3D,WAAK,MAAM,IAAI,iBAAiB,aAAa;AAAA,IAC/C;AAEA,UAAM,aAAa,OACjB,OACA,UACA,eACG;AACH,YAAM,cAAc,KAAK,KAAK,WAAW,OAAO,KAAK,aAAa,KAAK,WAAW;AAIlF,aAAM,qCAAU;AAChB,UAAI,WAAW,OAAO,QAAS;AAG/B,YAAM,cAAc,kBAAkB;AAAA,QACpC,MAAM;AAAA,QACN,WAAW;AAAA,MACb,CAAC;AAED,UAAI,eAAe;AACnB,uBAAiB,SAAS,aAAa;AACrC,YAAI,WAAW,OAAO,QAAS;AAG/B,YAAI,cAAc;AAChB,gBAAM,MAAM,SAAS,yBAAyB,IAAI,CAAC,WAAW;AAC9D,yBAAe;AAAA,QACjB;AAGA,cAAM,gBAAgB,MAAM,MAAM,oBAAoB,MAAM,MAAM;AAClE,8BAAsB;AAEtB,aAAK,MAAM,IAAI,KAAK;AAAA,MACtB;AAAA,IACF;AAEA,UAAM,QAAQ,IAAI,CAAC,aAAa,GAAG,yBAAyB,CAAC,CAAC;AAAA,EAChE;AACF;","names":[]}
@@ -1 +1 @@
- {"version":3,"sources":["../../src/tts/tts.ts"],"sourcesContent":["// SPDX-FileCopyrightText: 2024 LiveKit, Inc.\n//\n// SPDX-License-Identifier: Apache-2.0\nimport type { AudioFrame } from '@livekit/rtc-node';\nimport type { TypedEventEmitter as TypedEmitter } from '@livekit/typed-emitter';\nimport type { Span } from '@opentelemetry/api';\nimport { EventEmitter } from 'node:events';\nimport type { ReadableStream } from 'node:stream/web';\nimport { APIConnectionError, APIError } from '../_exceptions.js';\nimport { log } from '../log.js';\nimport type { TTSMetrics } from '../metrics/base.js';\nimport { DeferredReadableStream } from '../stream/deferred_stream.js';\nimport { recordException, traceTypes, tracer } from '../telemetry/index.js';\nimport { type APIConnectOptions, DEFAULT_API_CONNECT_OPTIONS, intervalForRetry } from '../types.js';\nimport { AsyncIterableQueue, delay, mergeFrames, startSoon, toError } from '../utils.js';\n\n/** SynthesizedAudio is a packet of speech synthesis as returned by the TTS. */\nexport interface SynthesizedAudio {\n /** Request ID (one segment could be made up of multiple requests) */\n requestId: string;\n /** Segment ID, each segment is separated by a flush */\n segmentId: string;\n /** Synthesized audio frame */\n frame: AudioFrame;\n /** Current segment of the synthesized audio */\n deltaText?: string;\n /** Whether this is the last frame of the segment (streaming only) */\n final: boolean;\n}\n\n/**\n * Describes the capabilities of the TTS provider.\n *\n * @remarks\n * At present, only `streaming` is supplied to this interface, and the framework only supports\n * providers that do have a streaming endpoint.\n */\nexport interface TTSCapabilities {\n streaming: boolean;\n}\n\nexport interface TTSError {\n type: 'tts_error';\n timestamp: number;\n label: string;\n error: Error;\n recoverable: boolean;\n}\n\nexport type TTSCallbacks = {\n ['metrics_collected']: (metrics: TTSMetrics) => void;\n ['error']: (error: TTSError) => void;\n};\n\n/**\n * An instance of a text-to-speech adapter.\n *\n * @remarks\n * This class is abstract, and as such cannot be used directly. 
… (remainder of the removed source-map line omitted: one JSON line carrying the escaped tts.ts source text plus its VLQ mappings)
+ {"version":3,"sources":["../../src/tts/tts.ts"], …} (regenerated source map: its embedded sourcesContent now includes the `TimedString` import from voice/io.js, the optional `timedTranscripts` field on `SynthesizedAudio`, and the optional `alignedTranscript` flag on `TTSCapabilities`; the VLQ mappings shift to match)
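For orientation before the declaration-file diffs: the tts.ts source embedded in the map above documents `pushText()` as deprecated in favor of `updateInputStream()`, which accepts a web `ReadableStream<string>`. Below is a minimal input-side sketch, assuming a concrete provider TTS instance (`someTts` is hypothetical) and the package's usual `tts` namespace export; it is an illustration, not part of this diff.

```ts
import { ReadableStream } from 'node:stream/web';
import { tts } from '@livekit/agents';

// Sketch only: `someTts` stands in for any concrete provider TTS; it is not part of this diff.
async function synthesizeSentence(someTts: tts.TTS): Promise<void> {
  const stream = someTts.stream();

  // Hand the stream a ReadableStream<string>; when the source closes, the stream
  // ends its input internally (what the deprecated pushText()/endInput() pair did explicitly).
  const source = new ReadableStream<string>({
    start(controller) {
      controller.enqueue('Hello from the synthesize stream.');
      controller.close();
    },
  });
  stream.updateInputStream(source);

  for await (const audio of stream) {
    if (typeof audio === 'symbol') break; // SynthesizeStream.END_OF_STREAM sentinel
    // audio is a SynthesizedAudio packet; audio.frame would normally go to an audio source.
    void audio.frame;
  }
}
```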
package/dist/tts/tts.d.cts CHANGED
@@ -5,7 +5,10 @@ import type { ReadableStream } from 'node:stream/web';
  import type { TTSMetrics } from '../metrics/base.js';
  import { type APIConnectOptions } from '../types.js';
  import { AsyncIterableQueue } from '../utils.js';
- /** SynthesizedAudio is a packet of speech synthesis as returned by the TTS. */
+ import type { TimedString } from '../voice/io.js';
+ /**
+  * SynthesizedAudio is a packet of speech synthesis as returned by the TTS.
+  */
  export interface SynthesizedAudio {
      /** Request ID (one segment could be made up of multiple requests) */
      requestId: string;
@@ -17,6 +20,10 @@ export interface SynthesizedAudio {
      deltaText?: string;
      /** Whether this is the last frame of the segment (streaming only) */
      final: boolean;
+     /**
+      * Timed transcripts associated with this audio packet (word-level timestamps).
+      */
+     timedTranscripts?: TimedString[];
  }
  /**
   * Describes the capabilities of the TTS provider.
@@ -27,6 +34,7 @@ export interface SynthesizedAudio {
   */
  export interface TTSCapabilities {
      streaming: boolean;
+     alignedTranscript?: boolean;
  }
  export interface TTSError {
      type: 'tts_error';
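Taken together, these declaration changes mean a `SynthesizedAudio` packet can now carry word-level timing when the provider supports it. A hedged consumer-side sketch follows; the provider instance and helper name are hypothetical, and the shape of `TimedString` is not shown in this diff, so the entries are treated as opaque values.

```ts
import { tts } from '@livekit/agents';

// Sketch only: `someTts` stands in for a provider TTS whose plugin populates
// `timedTranscripts`; support is advertised through `capabilities.alignedTranscript`.
async function synthesizeWithTimestamps(someTts: tts.TTS, text: string): Promise<void> {
  if (!someTts.capabilities.alignedTranscript) {
    console.warn(`${someTts.label} does not advertise aligned transcripts`);
  }

  for await (const audio of someTts.synthesize(text)) {
    if (audio.timedTranscripts?.length) {
      // TimedString is defined in voice/io.js; its fields are not part of this diff,
      // so the entries are only passed through here.
      console.log(`segment ${audio.segmentId}:`, audio.timedTranscripts);
    }
  }
}
```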
package/dist/tts/tts.d.ts CHANGED
@@ -5,7 +5,10 @@ import type { ReadableStream } from 'node:stream/web';
  import type { TTSMetrics } from '../metrics/base.js';
  import { type APIConnectOptions } from '../types.js';
  import { AsyncIterableQueue } from '../utils.js';
- /** SynthesizedAudio is a packet of speech synthesis as returned by the TTS. */
+ import type { TimedString } from '../voice/io.js';
+ /**
+  * SynthesizedAudio is a packet of speech synthesis as returned by the TTS.
+  */
  export interface SynthesizedAudio {
      /** Request ID (one segment could be made up of multiple requests) */
      requestId: string;
@@ -17,6 +20,10 @@ export interface SynthesizedAudio {
      deltaText?: string;
      /** Whether this is the last frame of the segment (streaming only) */
      final: boolean;
+     /**
+      * Timed transcripts associated with this audio packet (word-level timestamps).
+      */
+     timedTranscripts?: TimedString[];
  }
  /**
   * Describes the capabilities of the TTS provider.
@@ -27,6 +34,7 @@ export interface SynthesizedAudio {
   */
  export interface TTSCapabilities {
      streaming: boolean;
+     alignedTranscript?: boolean;
  }
  export interface TTSError {
      type: 'tts_error';
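On the provider side, the new capability is just one more field in the object passed to the abstract `TTS` constructor. A sketch of a hypothetical plugin declaring it is shown below; the class name, sample rate, and stubbed method bodies are illustrative, and only the constructor and capability shapes come from this package.

```ts
import { tts } from '@livekit/agents';

// Hypothetical plugin: everything except the TTS/TTSCapabilities shapes is illustrative.
export class AlignedExampleTTS extends tts.TTS {
  label = 'example.AlignedExampleTTS';

  constructor() {
    // (sampleRate, numChannels, capabilities) per the abstract TTS constructor
    super(24000, 1, { streaming: true, alignedTranscript: true });
  }

  synthesize(text: string): tts.ChunkedStream {
    throw new Error(`synthesize(${JSON.stringify(text)}) is left out of this sketch`);
  }

  stream(): tts.SynthesizeStream {
    throw new Error('stream() is left out of this sketch');
  }
}
```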