@livekit/agents 1.0.47 → 1.1.0-dev.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (444)
  1. package/dist/beta/index.cjs +29 -0
  2. package/dist/beta/index.cjs.map +1 -0
  3. package/dist/beta/index.d.cts +2 -0
  4. package/dist/beta/index.d.ts +2 -0
  5. package/dist/beta/index.d.ts.map +1 -0
  6. package/dist/beta/index.js +7 -0
  7. package/dist/beta/index.js.map +1 -0
  8. package/dist/beta/workflows/index.cjs +29 -0
  9. package/dist/beta/workflows/index.cjs.map +1 -0
  10. package/dist/beta/workflows/index.d.cts +2 -0
  11. package/dist/beta/workflows/index.d.ts +2 -0
  12. package/dist/beta/workflows/index.d.ts.map +1 -0
  13. package/dist/beta/workflows/index.js +7 -0
  14. package/dist/beta/workflows/index.js.map +1 -0
  15. package/dist/beta/workflows/task_group.cjs +162 -0
  16. package/dist/beta/workflows/task_group.cjs.map +1 -0
  17. package/dist/beta/workflows/task_group.d.cts +32 -0
  18. package/dist/beta/workflows/task_group.d.ts +32 -0
  19. package/dist/beta/workflows/task_group.d.ts.map +1 -0
  20. package/dist/beta/workflows/task_group.js +138 -0
  21. package/dist/beta/workflows/task_group.js.map +1 -0
  22. package/dist/constants.cjs +27 -0
  23. package/dist/constants.cjs.map +1 -1
  24. package/dist/constants.d.cts +9 -0
  25. package/dist/constants.d.ts +9 -0
  26. package/dist/constants.d.ts.map +1 -1
  27. package/dist/constants.js +18 -0
  28. package/dist/constants.js.map +1 -1
  29. package/dist/index.cjs +3 -0
  30. package/dist/index.cjs.map +1 -1
  31. package/dist/index.d.cts +2 -1
  32. package/dist/index.d.ts +2 -1
  33. package/dist/index.d.ts.map +1 -1
  34. package/dist/index.js +2 -0
  35. package/dist/index.js.map +1 -1
  36. package/dist/inference/api_protos.d.cts +12 -12
  37. package/dist/inference/api_protos.d.ts +12 -12
  38. package/dist/inference/interruption/defaults.cjs +81 -0
  39. package/dist/inference/interruption/defaults.cjs.map +1 -0
  40. package/dist/inference/interruption/defaults.d.cts +19 -0
  41. package/dist/inference/interruption/defaults.d.ts +19 -0
  42. package/dist/inference/interruption/defaults.d.ts.map +1 -0
  43. package/dist/inference/interruption/defaults.js +46 -0
  44. package/dist/inference/interruption/defaults.js.map +1 -0
  45. package/dist/inference/interruption/errors.cjs +44 -0
  46. package/dist/inference/interruption/errors.cjs.map +1 -0
  47. package/dist/inference/interruption/errors.d.cts +12 -0
  48. package/dist/inference/interruption/errors.d.ts +12 -0
  49. package/dist/inference/interruption/errors.d.ts.map +1 -0
  50. package/dist/inference/interruption/errors.js +20 -0
  51. package/dist/inference/interruption/errors.js.map +1 -0
  52. package/dist/inference/interruption/http_transport.cjs +147 -0
  53. package/dist/inference/interruption/http_transport.cjs.map +1 -0
  54. package/dist/inference/interruption/http_transport.d.cts +63 -0
  55. package/dist/inference/interruption/http_transport.d.ts +63 -0
  56. package/dist/inference/interruption/http_transport.d.ts.map +1 -0
  57. package/dist/inference/interruption/http_transport.js +121 -0
  58. package/dist/inference/interruption/http_transport.js.map +1 -0
  59. package/dist/inference/interruption/interruption_cache_entry.cjs +58 -0
  60. package/dist/inference/interruption/interruption_cache_entry.cjs.map +1 -0
  61. package/dist/inference/interruption/interruption_cache_entry.d.cts +30 -0
  62. package/dist/inference/interruption/interruption_cache_entry.d.ts +30 -0
  63. package/dist/inference/interruption/interruption_cache_entry.d.ts.map +1 -0
  64. package/dist/inference/interruption/interruption_cache_entry.js +34 -0
  65. package/dist/inference/interruption/interruption_cache_entry.js.map +1 -0
  66. package/dist/inference/interruption/interruption_detector.cjs +181 -0
  67. package/dist/inference/interruption/interruption_detector.cjs.map +1 -0
  68. package/dist/inference/interruption/interruption_detector.d.cts +59 -0
  69. package/dist/inference/interruption/interruption_detector.d.ts +59 -0
  70. package/dist/inference/interruption/interruption_detector.d.ts.map +1 -0
  71. package/dist/inference/interruption/interruption_detector.js +147 -0
  72. package/dist/inference/interruption/interruption_detector.js.map +1 -0
  73. package/dist/inference/interruption/interruption_stream.cjs +368 -0
  74. package/dist/inference/interruption/interruption_stream.cjs.map +1 -0
  75. package/dist/inference/interruption/interruption_stream.d.cts +46 -0
  76. package/dist/inference/interruption/interruption_stream.d.ts +46 -0
  77. package/dist/inference/interruption/interruption_stream.d.ts.map +1 -0
  78. package/dist/inference/interruption/interruption_stream.js +344 -0
  79. package/dist/inference/interruption/interruption_stream.js.map +1 -0
  80. package/dist/inference/interruption/types.cjs +17 -0
  81. package/dist/inference/interruption/types.cjs.map +1 -0
  82. package/dist/inference/interruption/types.d.cts +66 -0
  83. package/dist/inference/interruption/types.d.ts +66 -0
  84. package/dist/inference/interruption/types.d.ts.map +1 -0
  85. package/dist/inference/interruption/types.js +1 -0
  86. package/dist/inference/interruption/types.js.map +1 -0
  87. package/dist/inference/interruption/utils.cjs +130 -0
  88. package/dist/inference/interruption/utils.cjs.map +1 -0
  89. package/dist/inference/interruption/utils.d.cts +41 -0
  90. package/dist/inference/interruption/utils.d.ts +41 -0
  91. package/dist/inference/interruption/utils.d.ts.map +1 -0
  92. package/dist/inference/interruption/utils.js +105 -0
  93. package/dist/inference/interruption/utils.js.map +1 -0
  94. package/dist/inference/interruption/utils.test.cjs +105 -0
  95. package/dist/inference/interruption/utils.test.cjs.map +1 -0
  96. package/dist/inference/interruption/utils.test.js +104 -0
  97. package/dist/inference/interruption/utils.test.js.map +1 -0
  98. package/dist/inference/interruption/ws_transport.cjs +329 -0
  99. package/dist/inference/interruption/ws_transport.cjs.map +1 -0
  100. package/dist/inference/interruption/ws_transport.d.cts +33 -0
  101. package/dist/inference/interruption/ws_transport.d.ts +33 -0
  102. package/dist/inference/interruption/ws_transport.d.ts.map +1 -0
  103. package/dist/inference/interruption/ws_transport.js +295 -0
  104. package/dist/inference/interruption/ws_transport.js.map +1 -0
  105. package/dist/inference/llm.cjs +14 -10
  106. package/dist/inference/llm.cjs.map +1 -1
  107. package/dist/inference/llm.d.cts +2 -1
  108. package/dist/inference/llm.d.ts +2 -1
  109. package/dist/inference/llm.d.ts.map +1 -1
  110. package/dist/inference/llm.js +8 -10
  111. package/dist/inference/llm.js.map +1 -1
  112. package/dist/inference/stt.cjs +7 -2
  113. package/dist/inference/stt.cjs.map +1 -1
  114. package/dist/inference/stt.d.cts +2 -0
  115. package/dist/inference/stt.d.ts +2 -0
  116. package/dist/inference/stt.d.ts.map +1 -1
  117. package/dist/inference/stt.js +8 -3
  118. package/dist/inference/stt.js.map +1 -1
  119. package/dist/inference/tts.cjs +7 -2
  120. package/dist/inference/tts.cjs.map +1 -1
  121. package/dist/inference/tts.d.cts +2 -0
  122. package/dist/inference/tts.d.ts +2 -0
  123. package/dist/inference/tts.d.ts.map +1 -1
  124. package/dist/inference/tts.js +8 -3
  125. package/dist/inference/tts.js.map +1 -1
  126. package/dist/inference/utils.cjs +26 -7
  127. package/dist/inference/utils.cjs.map +1 -1
  128. package/dist/inference/utils.d.cts +13 -0
  129. package/dist/inference/utils.d.ts +13 -0
  130. package/dist/inference/utils.d.ts.map +1 -1
  131. package/dist/inference/utils.js +18 -2
  132. package/dist/inference/utils.js.map +1 -1
  133. package/dist/llm/chat_context.cjs +108 -2
  134. package/dist/llm/chat_context.cjs.map +1 -1
  135. package/dist/llm/chat_context.d.cts +28 -1
  136. package/dist/llm/chat_context.d.ts +28 -1
  137. package/dist/llm/chat_context.d.ts.map +1 -1
  138. package/dist/llm/chat_context.js +108 -2
  139. package/dist/llm/chat_context.js.map +1 -1
  140. package/dist/llm/chat_context.test.cjs +43 -0
  141. package/dist/llm/chat_context.test.cjs.map +1 -1
  142. package/dist/llm/chat_context.test.js +43 -0
  143. package/dist/llm/chat_context.test.js.map +1 -1
  144. package/dist/llm/index.cjs +2 -0
  145. package/dist/llm/index.cjs.map +1 -1
  146. package/dist/llm/index.d.cts +2 -2
  147. package/dist/llm/index.d.ts +2 -2
  148. package/dist/llm/index.d.ts.map +1 -1
  149. package/dist/llm/index.js +3 -1
  150. package/dist/llm/index.js.map +1 -1
  151. package/dist/llm/llm.cjs +16 -1
  152. package/dist/llm/llm.cjs.map +1 -1
  153. package/dist/llm/llm.d.cts +9 -0
  154. package/dist/llm/llm.d.ts +9 -0
  155. package/dist/llm/llm.d.ts.map +1 -1
  156. package/dist/llm/llm.js +16 -1
  157. package/dist/llm/llm.js.map +1 -1
  158. package/dist/llm/provider_format/index.d.cts +1 -1
  159. package/dist/llm/provider_format/index.d.ts +1 -1
  160. package/dist/llm/realtime.cjs +3 -0
  161. package/dist/llm/realtime.cjs.map +1 -1
  162. package/dist/llm/realtime.d.cts +1 -0
  163. package/dist/llm/realtime.d.ts +1 -0
  164. package/dist/llm/realtime.d.ts.map +1 -1
  165. package/dist/llm/realtime.js +3 -0
  166. package/dist/llm/realtime.js.map +1 -1
  167. package/dist/llm/tool_context.cjs +7 -0
  168. package/dist/llm/tool_context.cjs.map +1 -1
  169. package/dist/llm/tool_context.d.cts +10 -2
  170. package/dist/llm/tool_context.d.ts +10 -2
  171. package/dist/llm/tool_context.d.ts.map +1 -1
  172. package/dist/llm/tool_context.js +6 -0
  173. package/dist/llm/tool_context.js.map +1 -1
  174. package/dist/metrics/base.cjs.map +1 -1
  175. package/dist/metrics/base.d.cts +45 -1
  176. package/dist/metrics/base.d.ts +45 -1
  177. package/dist/metrics/base.d.ts.map +1 -1
  178. package/dist/metrics/index.cjs +5 -0
  179. package/dist/metrics/index.cjs.map +1 -1
  180. package/dist/metrics/index.d.cts +2 -1
  181. package/dist/metrics/index.d.ts +2 -1
  182. package/dist/metrics/index.d.ts.map +1 -1
  183. package/dist/metrics/index.js +6 -0
  184. package/dist/metrics/index.js.map +1 -1
  185. package/dist/metrics/model_usage.cjs +189 -0
  186. package/dist/metrics/model_usage.cjs.map +1 -0
  187. package/dist/metrics/model_usage.d.cts +92 -0
  188. package/dist/metrics/model_usage.d.ts +92 -0
  189. package/dist/metrics/model_usage.d.ts.map +1 -0
  190. package/dist/metrics/model_usage.js +164 -0
  191. package/dist/metrics/model_usage.js.map +1 -0
  192. package/dist/metrics/model_usage.test.cjs +474 -0
  193. package/dist/metrics/model_usage.test.cjs.map +1 -0
  194. package/dist/metrics/model_usage.test.js +476 -0
  195. package/dist/metrics/model_usage.test.js.map +1 -0
  196. package/dist/metrics/usage_collector.cjs +3 -0
  197. package/dist/metrics/usage_collector.cjs.map +1 -1
  198. package/dist/metrics/usage_collector.d.cts +9 -0
  199. package/dist/metrics/usage_collector.d.ts +9 -0
  200. package/dist/metrics/usage_collector.d.ts.map +1 -1
  201. package/dist/metrics/usage_collector.js +3 -0
  202. package/dist/metrics/usage_collector.js.map +1 -1
  203. package/dist/metrics/utils.cjs +9 -0
  204. package/dist/metrics/utils.cjs.map +1 -1
  205. package/dist/metrics/utils.d.ts.map +1 -1
  206. package/dist/metrics/utils.js +9 -0
  207. package/dist/metrics/utils.js.map +1 -1
  208. package/dist/stream/multi_input_stream.test.cjs +4 -0
  209. package/dist/stream/multi_input_stream.test.cjs.map +1 -1
  210. package/dist/stream/multi_input_stream.test.js +5 -1
  211. package/dist/stream/multi_input_stream.test.js.map +1 -1
  212. package/dist/stream/stream_channel.cjs +31 -0
  213. package/dist/stream/stream_channel.cjs.map +1 -1
  214. package/dist/stream/stream_channel.d.cts +4 -2
  215. package/dist/stream/stream_channel.d.ts +4 -2
  216. package/dist/stream/stream_channel.d.ts.map +1 -1
  217. package/dist/stream/stream_channel.js +31 -0
  218. package/dist/stream/stream_channel.js.map +1 -1
  219. package/dist/stt/stt.cjs +34 -2
  220. package/dist/stt/stt.cjs.map +1 -1
  221. package/dist/stt/stt.d.cts +22 -0
  222. package/dist/stt/stt.d.ts +22 -0
  223. package/dist/stt/stt.d.ts.map +1 -1
  224. package/dist/stt/stt.js +34 -2
  225. package/dist/stt/stt.js.map +1 -1
  226. package/dist/telemetry/otel_http_exporter.cjs +24 -5
  227. package/dist/telemetry/otel_http_exporter.cjs.map +1 -1
  228. package/dist/telemetry/otel_http_exporter.d.cts +1 -0
  229. package/dist/telemetry/otel_http_exporter.d.ts +1 -0
  230. package/dist/telemetry/otel_http_exporter.d.ts.map +1 -1
  231. package/dist/telemetry/otel_http_exporter.js +24 -5
  232. package/dist/telemetry/otel_http_exporter.js.map +1 -1
  233. package/dist/telemetry/trace_types.cjs +5 -5
  234. package/dist/telemetry/trace_types.cjs.map +1 -1
  235. package/dist/telemetry/trace_types.d.cts +9 -5
  236. package/dist/telemetry/trace_types.d.ts +9 -5
  237. package/dist/telemetry/trace_types.d.ts.map +1 -1
  238. package/dist/telemetry/trace_types.js +5 -5
  239. package/dist/telemetry/trace_types.js.map +1 -1
  240. package/dist/telemetry/traces.cjs +47 -8
  241. package/dist/telemetry/traces.cjs.map +1 -1
  242. package/dist/telemetry/traces.d.ts.map +1 -1
  243. package/dist/telemetry/traces.js +47 -8
  244. package/dist/telemetry/traces.js.map +1 -1
  245. package/dist/tts/tts.cjs +64 -2
  246. package/dist/tts/tts.cjs.map +1 -1
  247. package/dist/tts/tts.d.cts +34 -0
  248. package/dist/tts/tts.d.ts +34 -0
  249. package/dist/tts/tts.d.ts.map +1 -1
  250. package/dist/tts/tts.js +64 -2
  251. package/dist/tts/tts.js.map +1 -1
  252. package/dist/utils.cjs +1 -0
  253. package/dist/utils.cjs.map +1 -1
  254. package/dist/utils.d.ts.map +1 -1
  255. package/dist/utils.js +1 -0
  256. package/dist/utils.js.map +1 -1
  257. package/dist/version.cjs +1 -1
  258. package/dist/version.js +1 -1
  259. package/dist/voice/agent.cjs +34 -4
  260. package/dist/voice/agent.cjs.map +1 -1
  261. package/dist/voice/agent.d.cts +11 -2
  262. package/dist/voice/agent.d.ts +11 -2
  263. package/dist/voice/agent.d.ts.map +1 -1
  264. package/dist/voice/agent.js +34 -4
  265. package/dist/voice/agent.js.map +1 -1
  266. package/dist/voice/agent_activity.cjs +292 -44
  267. package/dist/voice/agent_activity.cjs.map +1 -1
  268. package/dist/voice/agent_activity.d.cts +27 -6
  269. package/dist/voice/agent_activity.d.ts +27 -6
  270. package/dist/voice/agent_activity.d.ts.map +1 -1
  271. package/dist/voice/agent_activity.js +293 -45
  272. package/dist/voice/agent_activity.js.map +1 -1
  273. package/dist/voice/agent_session.cjs +105 -48
  274. package/dist/voice/agent_session.cjs.map +1 -1
  275. package/dist/voice/agent_session.d.cts +90 -20
  276. package/dist/voice/agent_session.d.ts +90 -20
  277. package/dist/voice/agent_session.d.ts.map +1 -1
  278. package/dist/voice/agent_session.js +105 -46
  279. package/dist/voice/agent_session.js.map +1 -1
  280. package/dist/voice/audio_recognition.cjs +287 -6
  281. package/dist/voice/audio_recognition.cjs.map +1 -1
  282. package/dist/voice/audio_recognition.d.cts +42 -3
  283. package/dist/voice/audio_recognition.d.ts +42 -3
  284. package/dist/voice/audio_recognition.d.ts.map +1 -1
  285. package/dist/voice/audio_recognition.js +289 -7
  286. package/dist/voice/audio_recognition.js.map +1 -1
  287. package/dist/voice/client_events.cjs +554 -0
  288. package/dist/voice/client_events.cjs.map +1 -0
  289. package/dist/voice/client_events.d.cts +195 -0
  290. package/dist/voice/client_events.d.ts +195 -0
  291. package/dist/voice/client_events.d.ts.map +1 -0
  292. package/dist/voice/client_events.js +548 -0
  293. package/dist/voice/client_events.js.map +1 -0
  294. package/dist/voice/events.cjs +1 -0
  295. package/dist/voice/events.cjs.map +1 -1
  296. package/dist/voice/events.d.cts +8 -5
  297. package/dist/voice/events.d.ts +8 -5
  298. package/dist/voice/events.d.ts.map +1 -1
  299. package/dist/voice/events.js +1 -0
  300. package/dist/voice/events.js.map +1 -1
  301. package/dist/voice/generation.cjs +43 -8
  302. package/dist/voice/generation.cjs.map +1 -1
  303. package/dist/voice/generation.d.cts +3 -3
  304. package/dist/voice/generation.d.ts +3 -3
  305. package/dist/voice/generation.d.ts.map +1 -1
  306. package/dist/voice/generation.js +43 -8
  307. package/dist/voice/generation.js.map +1 -1
  308. package/dist/voice/index.cjs +1 -0
  309. package/dist/voice/index.cjs.map +1 -1
  310. package/dist/voice/index.d.cts +1 -0
  311. package/dist/voice/index.d.ts +1 -0
  312. package/dist/voice/index.d.ts.map +1 -1
  313. package/dist/voice/index.js +1 -0
  314. package/dist/voice/index.js.map +1 -1
  315. package/dist/voice/report.cjs +20 -8
  316. package/dist/voice/report.cjs.map +1 -1
  317. package/dist/voice/report.d.cts +5 -0
  318. package/dist/voice/report.d.ts +5 -0
  319. package/dist/voice/report.d.ts.map +1 -1
  320. package/dist/voice/report.js +20 -8
  321. package/dist/voice/report.js.map +1 -1
  322. package/dist/voice/report.test.cjs +106 -0
  323. package/dist/voice/report.test.cjs.map +1 -0
  324. package/dist/voice/report.test.js +105 -0
  325. package/dist/voice/report.test.js.map +1 -0
  326. package/dist/voice/room_io/room_io.cjs +16 -41
  327. package/dist/voice/room_io/room_io.cjs.map +1 -1
  328. package/dist/voice/room_io/room_io.d.cts +4 -9
  329. package/dist/voice/room_io/room_io.d.ts +4 -9
  330. package/dist/voice/room_io/room_io.d.ts.map +1 -1
  331. package/dist/voice/room_io/room_io.js +17 -43
  332. package/dist/voice/room_io/room_io.js.map +1 -1
  333. package/dist/voice/testing/fake_llm.cjs +127 -0
  334. package/dist/voice/testing/fake_llm.cjs.map +1 -0
  335. package/dist/voice/testing/fake_llm.d.cts +30 -0
  336. package/dist/voice/testing/fake_llm.d.ts +30 -0
  337. package/dist/voice/testing/fake_llm.d.ts.map +1 -0
  338. package/dist/voice/testing/fake_llm.js +103 -0
  339. package/dist/voice/testing/fake_llm.js.map +1 -0
  340. package/dist/voice/testing/index.cjs +3 -0
  341. package/dist/voice/testing/index.cjs.map +1 -1
  342. package/dist/voice/testing/index.d.cts +1 -0
  343. package/dist/voice/testing/index.d.ts +1 -0
  344. package/dist/voice/testing/index.d.ts.map +1 -1
  345. package/dist/voice/testing/index.js +2 -0
  346. package/dist/voice/testing/index.js.map +1 -1
  347. package/dist/voice/turn_config/endpointing.cjs +33 -0
  348. package/dist/voice/turn_config/endpointing.cjs.map +1 -0
  349. package/dist/voice/turn_config/endpointing.d.cts +30 -0
  350. package/dist/voice/turn_config/endpointing.d.ts +30 -0
  351. package/dist/voice/turn_config/endpointing.d.ts.map +1 -0
  352. package/dist/voice/turn_config/endpointing.js +9 -0
  353. package/dist/voice/turn_config/endpointing.js.map +1 -0
  354. package/dist/voice/turn_config/interruption.cjs +37 -0
  355. package/dist/voice/turn_config/interruption.cjs.map +1 -0
  356. package/dist/voice/turn_config/interruption.d.cts +53 -0
  357. package/dist/voice/turn_config/interruption.d.ts +53 -0
  358. package/dist/voice/turn_config/interruption.d.ts.map +1 -0
  359. package/dist/voice/turn_config/interruption.js +13 -0
  360. package/dist/voice/turn_config/interruption.js.map +1 -0
  361. package/dist/voice/turn_config/turn_handling.cjs +35 -0
  362. package/dist/voice/turn_config/turn_handling.cjs.map +1 -0
  363. package/dist/voice/turn_config/turn_handling.d.cts +36 -0
  364. package/dist/voice/turn_config/turn_handling.d.ts +36 -0
  365. package/dist/voice/turn_config/turn_handling.d.ts.map +1 -0
  366. package/dist/voice/turn_config/turn_handling.js +11 -0
  367. package/dist/voice/turn_config/turn_handling.js.map +1 -0
  368. package/dist/voice/turn_config/utils.cjs +97 -0
  369. package/dist/voice/turn_config/utils.cjs.map +1 -0
  370. package/dist/voice/turn_config/utils.d.cts +25 -0
  371. package/dist/voice/turn_config/utils.d.ts +25 -0
  372. package/dist/voice/turn_config/utils.d.ts.map +1 -0
  373. package/dist/voice/turn_config/utils.js +73 -0
  374. package/dist/voice/turn_config/utils.js.map +1 -0
  375. package/dist/voice/turn_config/utils.test.cjs +86 -0
  376. package/dist/voice/turn_config/utils.test.cjs.map +1 -0
  377. package/dist/voice/turn_config/utils.test.js +85 -0
  378. package/dist/voice/turn_config/utils.test.js.map +1 -0
  379. package/dist/voice/wire_format.cjs +798 -0
  380. package/dist/voice/wire_format.cjs.map +1 -0
  381. package/dist/voice/wire_format.d.cts +5503 -0
  382. package/dist/voice/wire_format.d.ts +5503 -0
  383. package/dist/voice/wire_format.d.ts.map +1 -0
  384. package/dist/voice/wire_format.js +728 -0
  385. package/dist/voice/wire_format.js.map +1 -0
  386. package/package.json +2 -1
  387. package/src/beta/index.ts +9 -0
  388. package/src/beta/workflows/index.ts +9 -0
  389. package/src/beta/workflows/task_group.ts +194 -0
  390. package/src/constants.ts +13 -0
  391. package/src/index.ts +2 -1
  392. package/src/inference/interruption/defaults.ts +51 -0
  393. package/src/inference/interruption/errors.ts +25 -0
  394. package/src/inference/interruption/http_transport.ts +187 -0
  395. package/src/inference/interruption/interruption_cache_entry.ts +50 -0
  396. package/src/inference/interruption/interruption_detector.ts +188 -0
  397. package/src/inference/interruption/interruption_stream.ts +467 -0
  398. package/src/inference/interruption/types.ts +84 -0
  399. package/src/inference/interruption/utils.test.ts +132 -0
  400. package/src/inference/interruption/utils.ts +137 -0
  401. package/src/inference/interruption/ws_transport.ts +402 -0
  402. package/src/inference/llm.ts +9 -12
  403. package/src/inference/stt.ts +10 -3
  404. package/src/inference/tts.ts +10 -3
  405. package/src/inference/utils.ts +29 -1
  406. package/src/llm/chat_context.test.ts +48 -0
  407. package/src/llm/chat_context.ts +161 -0
  408. package/src/llm/index.ts +2 -0
  409. package/src/llm/llm.ts +16 -0
  410. package/src/llm/realtime.ts +4 -0
  411. package/src/llm/tool_context.ts +14 -0
  412. package/src/metrics/base.ts +48 -1
  413. package/src/metrics/index.ts +11 -0
  414. package/src/metrics/model_usage.test.ts +545 -0
  415. package/src/metrics/model_usage.ts +262 -0
  416. package/src/metrics/usage_collector.ts +11 -0
  417. package/src/metrics/utils.ts +11 -0
  418. package/src/stream/multi_input_stream.test.ts +6 -1
  419. package/src/stream/stream_channel.ts +34 -2
  420. package/src/stt/stt.ts +38 -0
  421. package/src/telemetry/otel_http_exporter.ts +28 -5
  422. package/src/telemetry/trace_types.ts +11 -8
  423. package/src/telemetry/traces.ts +111 -54
  424. package/src/tts/tts.ts +69 -1
  425. package/src/utils.ts +5 -0
  426. package/src/voice/agent.ts +41 -3
  427. package/src/voice/agent_activity.ts +371 -34
  428. package/src/voice/agent_session.ts +207 -59
  429. package/src/voice/audio_recognition.ts +385 -9
  430. package/src/voice/client_events.ts +838 -0
  431. package/src/voice/events.ts +14 -4
  432. package/src/voice/generation.ts +52 -9
  433. package/src/voice/index.ts +1 -0
  434. package/src/voice/report.test.ts +117 -0
  435. package/src/voice/report.ts +29 -6
  436. package/src/voice/room_io/room_io.ts +21 -64
  437. package/src/voice/testing/fake_llm.ts +138 -0
  438. package/src/voice/testing/index.ts +2 -0
  439. package/src/voice/turn_config/endpointing.ts +33 -0
  440. package/src/voice/turn_config/interruption.ts +56 -0
  441. package/src/voice/turn_config/turn_handling.ts +45 -0
  442. package/src/voice/turn_config/utils.test.ts +100 -0
  443. package/src/voice/turn_config/utils.ts +103 -0
  444. package/src/voice/wire_format.ts +827 -0
@@ -0,0 +1,474 @@
1
+ "use strict";
2
+ var import_vitest = require("vitest");
3
+ var import_model_usage = require("./model_usage.cjs");
4
+ (0, import_vitest.describe)("model_usage", () => {
5
+ (0, import_vitest.describe)("filterZeroValues", () => {
6
+ (0, import_vitest.it)("should filter out zero values from LLMModelUsage", () => {
7
+ const usage = {
8
+ type: "llm_usage",
9
+ provider: "openai",
10
+ model: "gpt-4o",
11
+ inputTokens: 100,
12
+ inputCachedTokens: 0,
13
+ inputAudioTokens: 0,
14
+ inputCachedAudioTokens: 0,
15
+ inputTextTokens: 0,
16
+ inputCachedTextTokens: 0,
17
+ inputImageTokens: 0,
18
+ inputCachedImageTokens: 0,
19
+ outputTokens: 50,
20
+ outputAudioTokens: 0,
21
+ outputTextTokens: 0,
22
+ sessionDurationMs: 0
23
+ };
24
+ const filtered = (0, import_model_usage.filterZeroValues)(usage);
25
+ (0, import_vitest.expect)(filtered.type).toBe("llm_usage");
26
+ (0, import_vitest.expect)(filtered.provider).toBe("openai");
27
+ (0, import_vitest.expect)(filtered.model).toBe("gpt-4o");
28
+ (0, import_vitest.expect)(filtered.inputTokens).toBe(100);
29
+ (0, import_vitest.expect)(filtered.outputTokens).toBe(50);
30
+ (0, import_vitest.expect)(filtered.inputCachedTokens).toBeUndefined();
31
+ (0, import_vitest.expect)(filtered.inputAudioTokens).toBeUndefined();
32
+ (0, import_vitest.expect)(filtered.sessionDurationMs).toBeUndefined();
33
+ });
34
+ (0, import_vitest.it)("should filter out zero values from TTSModelUsage", () => {
35
+ const usage = {
36
+ type: "tts_usage",
37
+ provider: "elevenlabs",
38
+ model: "eleven_turbo_v2",
39
+ inputTokens: 0,
40
+ outputTokens: 0,
41
+ charactersCount: 500,
42
+ audioDurationMs: 3e3
43
+ };
44
+ const filtered = (0, import_model_usage.filterZeroValues)(usage);
45
+ (0, import_vitest.expect)(filtered.type).toBe("tts_usage");
46
+ (0, import_vitest.expect)(filtered.provider).toBe("elevenlabs");
47
+ (0, import_vitest.expect)(filtered.charactersCount).toBe(500);
48
+ (0, import_vitest.expect)(filtered.audioDurationMs).toBe(3e3);
49
+ (0, import_vitest.expect)(filtered.inputTokens).toBeUndefined();
50
+ (0, import_vitest.expect)(filtered.outputTokens).toBeUndefined();
51
+ });
52
+ (0, import_vitest.it)("should keep all values when none are zero", () => {
53
+ const usage = {
54
+ type: "stt_usage",
55
+ provider: "deepgram",
56
+ model: "nova-2",
57
+ inputTokens: 10,
58
+ outputTokens: 20,
59
+ audioDurationMs: 5e3
60
+ };
61
+ const filtered = (0, import_model_usage.filterZeroValues)(usage);
62
+ (0, import_vitest.expect)(Object.keys(filtered)).toHaveLength(6);
63
+ (0, import_vitest.expect)(filtered).toEqual(usage);
64
+ });
65
+ });
66
+ (0, import_vitest.describe)("ModelUsageCollector", () => {
67
+ let collector;
68
+ (0, import_vitest.beforeEach)(() => {
69
+ collector = new import_model_usage.ModelUsageCollector();
70
+ });
71
+ (0, import_vitest.describe)("collect LLM metrics", () => {
72
+ (0, import_vitest.it)("should aggregate LLM metrics by provider and model", () => {
73
+ const metrics1 = {
74
+ type: "llm_metrics",
75
+ label: "test",
76
+ requestId: "req1",
77
+ timestamp: Date.now(),
78
+ durationMs: 100,
79
+ ttftMs: 50,
80
+ cancelled: false,
81
+ completionTokens: 100,
82
+ promptTokens: 200,
83
+ promptCachedTokens: 50,
84
+ totalTokens: 300,
85
+ tokensPerSecond: 10,
86
+ metadata: {
87
+ modelProvider: "openai",
88
+ modelName: "gpt-4o"
89
+ }
90
+ };
91
+ const metrics2 = {
92
+ type: "llm_metrics",
93
+ label: "test",
94
+ requestId: "req2",
95
+ timestamp: Date.now(),
96
+ durationMs: 150,
97
+ ttftMs: 60,
98
+ cancelled: false,
99
+ completionTokens: 150,
100
+ promptTokens: 300,
101
+ promptCachedTokens: 75,
102
+ totalTokens: 450,
103
+ tokensPerSecond: 12,
104
+ metadata: {
105
+ modelProvider: "openai",
106
+ modelName: "gpt-4o"
107
+ }
108
+ };
109
+ collector.collect(metrics1);
110
+ collector.collect(metrics2);
111
+ const usage = collector.flatten();
112
+ (0, import_vitest.expect)(usage).toHaveLength(1);
113
+ const llmUsage = usage[0];
114
+ (0, import_vitest.expect)(llmUsage.type).toBe("llm_usage");
115
+ (0, import_vitest.expect)(llmUsage.provider).toBe("openai");
116
+ (0, import_vitest.expect)(llmUsage.model).toBe("gpt-4o");
117
+ (0, import_vitest.expect)(llmUsage.inputTokens).toBe(500);
118
+ (0, import_vitest.expect)(llmUsage.inputCachedTokens).toBe(125);
119
+ (0, import_vitest.expect)(llmUsage.outputTokens).toBe(250);
120
+ });
121
+ (0, import_vitest.it)("should separate metrics by different providers", () => {
122
+ const openaiMetrics = {
123
+ type: "llm_metrics",
124
+ label: "test",
125
+ requestId: "req1",
126
+ timestamp: Date.now(),
127
+ durationMs: 100,
128
+ ttftMs: 50,
129
+ cancelled: false,
130
+ completionTokens: 100,
131
+ promptTokens: 200,
132
+ promptCachedTokens: 0,
133
+ totalTokens: 300,
134
+ tokensPerSecond: 10,
135
+ metadata: {
136
+ modelProvider: "openai",
137
+ modelName: "gpt-4o"
138
+ }
139
+ };
140
+ const anthropicMetrics = {
141
+ type: "llm_metrics",
142
+ label: "test",
143
+ requestId: "req2",
144
+ timestamp: Date.now(),
145
+ durationMs: 120,
146
+ ttftMs: 55,
147
+ cancelled: false,
148
+ completionTokens: 80,
149
+ promptTokens: 150,
150
+ promptCachedTokens: 0,
151
+ totalTokens: 230,
152
+ tokensPerSecond: 8,
153
+ metadata: {
154
+ modelProvider: "anthropic",
155
+ modelName: "claude-3-5-sonnet"
156
+ }
157
+ };
158
+ collector.collect(openaiMetrics);
159
+ collector.collect(anthropicMetrics);
160
+ const usage = collector.flatten();
161
+ (0, import_vitest.expect)(usage).toHaveLength(2);
162
+ const openaiUsage = usage.find(
163
+ (u) => u.type === "llm_usage" && u.provider === "openai"
164
+ );
165
+ const anthropicUsage = usage.find(
166
+ (u) => u.type === "llm_usage" && u.provider === "anthropic"
167
+ );
168
+ (0, import_vitest.expect)(openaiUsage.inputTokens).toBe(200);
169
+ (0, import_vitest.expect)(openaiUsage.outputTokens).toBe(100);
170
+ (0, import_vitest.expect)(anthropicUsage.inputTokens).toBe(150);
171
+ (0, import_vitest.expect)(anthropicUsage.outputTokens).toBe(80);
172
+ });
173
+ });
174
+ (0, import_vitest.describe)("collect TTS metrics", () => {
175
+ (0, import_vitest.it)("should aggregate TTS metrics by provider and model", () => {
176
+ const metrics1 = {
177
+ type: "tts_metrics",
178
+ label: "test",
179
+ requestId: "req1",
180
+ timestamp: Date.now(),
181
+ ttfbMs: 100,
182
+ durationMs: 500,
183
+ audioDurationMs: 3e3,
184
+ cancelled: false,
185
+ charactersCount: 100,
186
+ inputTokens: 10,
187
+ outputTokens: 20,
188
+ streamed: true,
189
+ metadata: {
190
+ modelProvider: "elevenlabs",
191
+ modelName: "eleven_turbo_v2"
192
+ }
193
+ };
194
+ const metrics2 = {
195
+ type: "tts_metrics",
196
+ label: "test",
197
+ requestId: "req2",
198
+ timestamp: Date.now(),
199
+ ttfbMs: 120,
200
+ durationMs: 600,
201
+ audioDurationMs: 4e3,
202
+ cancelled: false,
203
+ charactersCount: 200,
204
+ inputTokens: 15,
205
+ outputTokens: 25,
206
+ streamed: true,
207
+ metadata: {
208
+ modelProvider: "elevenlabs",
209
+ modelName: "eleven_turbo_v2"
210
+ }
211
+ };
212
+ collector.collect(metrics1);
213
+ collector.collect(metrics2);
214
+ const usage = collector.flatten();
215
+ (0, import_vitest.expect)(usage).toHaveLength(1);
216
+ const ttsUsage = usage[0];
217
+ (0, import_vitest.expect)(ttsUsage.type).toBe("tts_usage");
218
+ (0, import_vitest.expect)(ttsUsage.provider).toBe("elevenlabs");
219
+ (0, import_vitest.expect)(ttsUsage.model).toBe("eleven_turbo_v2");
220
+ (0, import_vitest.expect)(ttsUsage.charactersCount).toBe(300);
221
+ (0, import_vitest.expect)(ttsUsage.audioDurationMs).toBe(7e3);
222
+ (0, import_vitest.expect)(ttsUsage.inputTokens).toBe(25);
223
+ (0, import_vitest.expect)(ttsUsage.outputTokens).toBe(45);
224
+ });
225
+ });
226
+ (0, import_vitest.describe)("collect STT metrics", () => {
227
+ (0, import_vitest.it)("should aggregate STT metrics by provider and model", () => {
228
+ const metrics1 = {
229
+ type: "stt_metrics",
230
+ label: "test",
231
+ requestId: "req1",
232
+ timestamp: Date.now(),
233
+ durationMs: 0,
234
+ audioDurationMs: 5e3,
235
+ inputTokens: 50,
236
+ outputTokens: 100,
237
+ streamed: true,
238
+ metadata: {
239
+ modelProvider: "deepgram",
240
+ modelName: "nova-2"
241
+ }
242
+ };
243
+ const metrics2 = {
244
+ type: "stt_metrics",
245
+ label: "test",
246
+ requestId: "req2",
247
+ timestamp: Date.now(),
248
+ durationMs: 0,
249
+ audioDurationMs: 3e3,
250
+ inputTokens: 30,
251
+ outputTokens: 60,
252
+ streamed: true,
253
+ metadata: {
254
+ modelProvider: "deepgram",
255
+ modelName: "nova-2"
256
+ }
257
+ };
258
+ collector.collect(metrics1);
259
+ collector.collect(metrics2);
260
+ const usage = collector.flatten();
261
+ (0, import_vitest.expect)(usage).toHaveLength(1);
262
+ const sttUsage = usage[0];
263
+ (0, import_vitest.expect)(sttUsage.type).toBe("stt_usage");
264
+ (0, import_vitest.expect)(sttUsage.provider).toBe("deepgram");
265
+ (0, import_vitest.expect)(sttUsage.model).toBe("nova-2");
266
+ (0, import_vitest.expect)(sttUsage.audioDurationMs).toBe(8e3);
267
+ (0, import_vitest.expect)(sttUsage.inputTokens).toBe(80);
268
+ (0, import_vitest.expect)(sttUsage.outputTokens).toBe(160);
269
+ });
270
+ });
271
+ (0, import_vitest.describe)("collect realtime model metrics", () => {
272
+ (0, import_vitest.it)("should aggregate realtime model metrics with detailed token breakdown", () => {
273
+ const metrics = {
274
+ type: "realtime_model_metrics",
275
+ label: "test",
276
+ requestId: "req1",
277
+ timestamp: Date.now(),
278
+ durationMs: 1e3,
279
+ ttftMs: 100,
280
+ cancelled: false,
281
+ inputTokens: 500,
282
+ outputTokens: 300,
283
+ totalTokens: 800,
284
+ tokensPerSecond: 10,
285
+ sessionDurationMs: 5e3,
286
+ inputTokenDetails: {
287
+ audioTokens: 200,
288
+ textTokens: 250,
289
+ imageTokens: 50,
290
+ cachedTokens: 100,
291
+ cachedTokensDetails: {
292
+ audioTokens: 30,
293
+ textTokens: 50,
294
+ imageTokens: 20
295
+ }
296
+ },
297
+ outputTokenDetails: {
298
+ textTokens: 200,
299
+ audioTokens: 100,
300
+ imageTokens: 0
301
+ },
302
+ metadata: {
303
+ modelProvider: "openai",
304
+ modelName: "gpt-4o-realtime"
305
+ }
306
+ };
307
+ collector.collect(metrics);
308
+ const usage = collector.flatten();
309
+ (0, import_vitest.expect)(usage).toHaveLength(1);
310
+ const llmUsage = usage[0];
311
+ (0, import_vitest.expect)(llmUsage.type).toBe("llm_usage");
312
+ (0, import_vitest.expect)(llmUsage.provider).toBe("openai");
313
+ (0, import_vitest.expect)(llmUsage.model).toBe("gpt-4o-realtime");
314
+ (0, import_vitest.expect)(llmUsage.inputTokens).toBe(500);
315
+ (0, import_vitest.expect)(llmUsage.inputCachedTokens).toBe(100);
316
+ (0, import_vitest.expect)(llmUsage.inputAudioTokens).toBe(200);
317
+ (0, import_vitest.expect)(llmUsage.inputCachedAudioTokens).toBe(30);
318
+ (0, import_vitest.expect)(llmUsage.inputTextTokens).toBe(250);
319
+ (0, import_vitest.expect)(llmUsage.inputCachedTextTokens).toBe(50);
320
+ (0, import_vitest.expect)(llmUsage.inputImageTokens).toBe(50);
321
+ (0, import_vitest.expect)(llmUsage.inputCachedImageTokens).toBe(20);
322
+ (0, import_vitest.expect)(llmUsage.outputTokens).toBe(300);
323
+ (0, import_vitest.expect)(llmUsage.outputTextTokens).toBe(200);
324
+ (0, import_vitest.expect)(llmUsage.outputAudioTokens).toBe(100);
325
+ (0, import_vitest.expect)(llmUsage.sessionDurationMs).toBe(5e3);
326
+ });
327
+ });
328
+ (0, import_vitest.describe)("mixed metrics collection", () => {
329
+ (0, import_vitest.it)("should collect and separate LLM, TTS, and STT metrics", () => {
330
+ const llmMetrics = {
331
+ type: "llm_metrics",
332
+ label: "test",
333
+ requestId: "req1",
334
+ timestamp: Date.now(),
335
+ durationMs: 100,
336
+ ttftMs: 50,
337
+ cancelled: false,
338
+ completionTokens: 100,
339
+ promptTokens: 200,
340
+ promptCachedTokens: 0,
341
+ totalTokens: 300,
342
+ tokensPerSecond: 10,
343
+ metadata: {
344
+ modelProvider: "openai",
345
+ modelName: "gpt-4o"
346
+ }
347
+ };
348
+ const ttsMetrics = {
349
+ type: "tts_metrics",
350
+ label: "test",
351
+ requestId: "req2",
352
+ timestamp: Date.now(),
353
+ ttfbMs: 100,
354
+ durationMs: 500,
355
+ audioDurationMs: 3e3,
356
+ cancelled: false,
357
+ charactersCount: 100,
358
+ streamed: true,
359
+ metadata: {
360
+ modelProvider: "elevenlabs",
361
+ modelName: "eleven_turbo_v2"
362
+ }
363
+ };
364
+ const sttMetrics = {
365
+ type: "stt_metrics",
366
+ label: "test",
367
+ requestId: "req3",
368
+ timestamp: Date.now(),
369
+ durationMs: 0,
370
+ audioDurationMs: 5e3,
371
+ streamed: true,
372
+ metadata: {
373
+ modelProvider: "deepgram",
374
+ modelName: "nova-2"
375
+ }
376
+ };
377
+ collector.collect(llmMetrics);
378
+ collector.collect(ttsMetrics);
379
+ collector.collect(sttMetrics);
380
+ const usage = collector.flatten();
381
+ (0, import_vitest.expect)(usage).toHaveLength(3);
382
+ const llmUsage = usage.find((u) => u.type === "llm_usage");
383
+ const ttsUsage = usage.find((u) => u.type === "tts_usage");
384
+ const sttUsage = usage.find((u) => u.type === "stt_usage");
385
+ (0, import_vitest.expect)(llmUsage).toBeDefined();
386
+ (0, import_vitest.expect)(ttsUsage).toBeDefined();
387
+ (0, import_vitest.expect)(sttUsage).toBeDefined();
388
+ });
389
+ });
390
+ (0, import_vitest.describe)("flatten returns copies", () => {
391
+ (0, import_vitest.it)("should return deep copies of usage objects", () => {
392
+ const metrics = {
393
+ type: "llm_metrics",
394
+ label: "test",
395
+ requestId: "req1",
396
+ timestamp: Date.now(),
397
+ durationMs: 100,
398
+ ttftMs: 50,
399
+ cancelled: false,
400
+ completionTokens: 100,
401
+ promptTokens: 200,
402
+ promptCachedTokens: 0,
403
+ totalTokens: 300,
404
+ tokensPerSecond: 10,
405
+ metadata: {
406
+ modelProvider: "openai",
407
+ modelName: "gpt-4o"
408
+ }
409
+ };
410
+ collector.collect(metrics);
411
+ const usage1 = collector.flatten();
412
+ const usage2 = collector.flatten();
413
+ (0, import_vitest.expect)(usage1[0]).toEqual(usage2[0]);
414
+ (0, import_vitest.expect)(usage1[0]).not.toBe(usage2[0]);
415
+ usage1[0].inputTokens = 9999;
416
+ (0, import_vitest.expect)(usage2[0].inputTokens).toBe(200);
417
+ });
418
+ });
419
+ (0, import_vitest.describe)("handles missing metadata", () => {
420
+ (0, import_vitest.it)("should use empty strings when metadata is missing", () => {
421
+ const metrics = {
422
+ type: "llm_metrics",
423
+ label: "test",
424
+ requestId: "req1",
425
+ timestamp: Date.now(),
426
+ durationMs: 100,
427
+ ttftMs: 50,
428
+ cancelled: false,
429
+ completionTokens: 100,
430
+ promptTokens: 200,
431
+ promptCachedTokens: 0,
432
+ totalTokens: 300,
433
+ tokensPerSecond: 10
434
+ // No metadata
435
+ };
436
+ collector.collect(metrics);
437
+ const usage = collector.flatten();
438
+ (0, import_vitest.expect)(usage).toHaveLength(1);
439
+ const llmUsage = usage[0];
440
+ (0, import_vitest.expect)(llmUsage.provider).toBe("");
441
+ (0, import_vitest.expect)(llmUsage.model).toBe("");
442
+ });
443
+ });
444
+ (0, import_vitest.describe)("ignores VAD and EOU metrics", () => {
445
+ (0, import_vitest.it)("should not collect VAD metrics", () => {
446
+ const vadMetrics = {
447
+ type: "vad_metrics",
448
+ label: "test",
449
+ timestamp: Date.now(),
450
+ idleTimeMs: 100,
451
+ inferenceDurationTotalMs: 50,
452
+ inferenceCount: 10
453
+ };
454
+ collector.collect(vadMetrics);
455
+ const usage = collector.flatten();
456
+ (0, import_vitest.expect)(usage).toHaveLength(0);
457
+ });
458
+ (0, import_vitest.it)("should not collect EOU metrics", () => {
459
+ const eouMetrics = {
460
+ type: "eou_metrics",
461
+ timestamp: Date.now(),
462
+ endOfUtteranceDelayMs: 100,
463
+ transcriptionDelayMs: 50,
464
+ onUserTurnCompletedDelayMs: 30,
465
+ lastSpeakingTimeMs: Date.now()
466
+ };
467
+ collector.collect(eouMetrics);
468
+ const usage = collector.flatten();
469
+ (0, import_vitest.expect)(usage).toHaveLength(0);
470
+ });
471
+ });
472
+ });
473
+ });
474
+ //# sourceMappingURL=model_usage.test.cjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../../src/metrics/model_usage.test.ts"],"sourcesContent":["// SPDX-FileCopyrightText: 2024 LiveKit, Inc.\n//\n// SPDX-License-Identifier: Apache-2.0\nimport { beforeEach, describe, expect, it } from 'vitest';\nimport type { LLMMetrics, RealtimeModelMetrics, STTMetrics, TTSMetrics } from './base.js';\nimport {\n type LLMModelUsage,\n ModelUsageCollector,\n type STTModelUsage,\n type TTSModelUsage,\n filterZeroValues,\n} from './model_usage.js';\n\ndescribe('model_usage', () => {\n describe('filterZeroValues', () => {\n it('should filter out zero values from LLMModelUsage', () => {\n const usage: LLMModelUsage = {\n type: 'llm_usage',\n provider: 'openai',\n model: 'gpt-4o',\n inputTokens: 100,\n inputCachedTokens: 0,\n inputAudioTokens: 0,\n inputCachedAudioTokens: 0,\n inputTextTokens: 0,\n inputCachedTextTokens: 0,\n inputImageTokens: 0,\n inputCachedImageTokens: 0,\n outputTokens: 50,\n outputAudioTokens: 0,\n outputTextTokens: 0,\n sessionDurationMs: 0,\n };\n\n const filtered = filterZeroValues(usage);\n\n expect(filtered.type).toBe('llm_usage');\n expect(filtered.provider).toBe('openai');\n expect(filtered.model).toBe('gpt-4o');\n expect(filtered.inputTokens).toBe(100);\n expect(filtered.outputTokens).toBe(50);\n // Zero values should be filtered out\n expect(filtered.inputCachedTokens).toBeUndefined();\n expect(filtered.inputAudioTokens).toBeUndefined();\n expect(filtered.sessionDurationMs).toBeUndefined();\n });\n\n it('should filter out zero values from TTSModelUsage', () => {\n const usage: TTSModelUsage = {\n type: 'tts_usage',\n provider: 'elevenlabs',\n model: 'eleven_turbo_v2',\n inputTokens: 0,\n outputTokens: 0,\n charactersCount: 500,\n audioDurationMs: 3000,\n };\n\n const filtered = filterZeroValues(usage);\n\n expect(filtered.type).toBe('tts_usage');\n expect(filtered.provider).toBe('elevenlabs');\n expect(filtered.charactersCount).toBe(500);\n expect(filtered.audioDurationMs).toBe(3000);\n 
expect(filtered.inputTokens).toBeUndefined();\n expect(filtered.outputTokens).toBeUndefined();\n });\n\n it('should keep all values when none are zero', () => {\n const usage: STTModelUsage = {\n type: 'stt_usage',\n provider: 'deepgram',\n model: 'nova-2',\n inputTokens: 10,\n outputTokens: 20,\n audioDurationMs: 5000,\n };\n\n const filtered = filterZeroValues(usage);\n\n expect(Object.keys(filtered)).toHaveLength(6);\n expect(filtered).toEqual(usage);\n });\n });\n\n describe('ModelUsageCollector', () => {\n let collector: ModelUsageCollector;\n\n beforeEach(() => {\n collector = new ModelUsageCollector();\n });\n\n describe('collect LLM metrics', () => {\n it('should aggregate LLM metrics by provider and model', () => {\n const metrics1: LLMMetrics = {\n type: 'llm_metrics',\n label: 'test',\n requestId: 'req1',\n timestamp: Date.now(),\n durationMs: 100,\n ttftMs: 50,\n cancelled: false,\n completionTokens: 100,\n promptTokens: 200,\n promptCachedTokens: 50,\n totalTokens: 300,\n tokensPerSecond: 10,\n metadata: {\n modelProvider: 'openai',\n modelName: 'gpt-4o',\n },\n };\n\n const metrics2: LLMMetrics = {\n type: 'llm_metrics',\n label: 'test',\n requestId: 'req2',\n timestamp: Date.now(),\n durationMs: 150,\n ttftMs: 60,\n cancelled: false,\n completionTokens: 150,\n promptTokens: 300,\n promptCachedTokens: 75,\n totalTokens: 450,\n tokensPerSecond: 12,\n metadata: {\n modelProvider: 'openai',\n modelName: 'gpt-4o',\n },\n };\n\n collector.collect(metrics1);\n collector.collect(metrics2);\n\n const usage = collector.flatten();\n expect(usage).toHaveLength(1);\n\n const llmUsage = usage[0] as LLMModelUsage;\n expect(llmUsage.type).toBe('llm_usage');\n expect(llmUsage.provider).toBe('openai');\n expect(llmUsage.model).toBe('gpt-4o');\n expect(llmUsage.inputTokens).toBe(500); // 200 + 300\n expect(llmUsage.inputCachedTokens).toBe(125); // 50 + 75\n expect(llmUsage.outputTokens).toBe(250); // 100 + 150\n });\n\n it('should separate metrics by different 
providers', () => {\n const openaiMetrics: LLMMetrics = {\n type: 'llm_metrics',\n label: 'test',\n requestId: 'req1',\n timestamp: Date.now(),\n durationMs: 100,\n ttftMs: 50,\n cancelled: false,\n completionTokens: 100,\n promptTokens: 200,\n promptCachedTokens: 0,\n totalTokens: 300,\n tokensPerSecond: 10,\n metadata: {\n modelProvider: 'openai',\n modelName: 'gpt-4o',\n },\n };\n\n const anthropicMetrics: LLMMetrics = {\n type: 'llm_metrics',\n label: 'test',\n requestId: 'req2',\n timestamp: Date.now(),\n durationMs: 120,\n ttftMs: 55,\n cancelled: false,\n completionTokens: 80,\n promptTokens: 150,\n promptCachedTokens: 0,\n totalTokens: 230,\n tokensPerSecond: 8,\n metadata: {\n modelProvider: 'anthropic',\n modelName: 'claude-3-5-sonnet',\n },\n };\n\n collector.collect(openaiMetrics);\n collector.collect(anthropicMetrics);\n\n const usage = collector.flatten();\n expect(usage).toHaveLength(2);\n\n const openaiUsage = usage.find(\n (u) => u.type === 'llm_usage' && u.provider === 'openai',\n ) as LLMModelUsage;\n const anthropicUsage = usage.find(\n (u) => u.type === 'llm_usage' && u.provider === 'anthropic',\n ) as LLMModelUsage;\n\n expect(openaiUsage.inputTokens).toBe(200);\n expect(openaiUsage.outputTokens).toBe(100);\n expect(anthropicUsage.inputTokens).toBe(150);\n expect(anthropicUsage.outputTokens).toBe(80);\n });\n });\n\n describe('collect TTS metrics', () => {\n it('should aggregate TTS metrics by provider and model', () => {\n const metrics1: TTSMetrics = {\n type: 'tts_metrics',\n label: 'test',\n requestId: 'req1',\n timestamp: Date.now(),\n ttfbMs: 100,\n durationMs: 500,\n audioDurationMs: 3000,\n cancelled: false,\n charactersCount: 100,\n inputTokens: 10,\n outputTokens: 20,\n streamed: true,\n metadata: {\n modelProvider: 'elevenlabs',\n modelName: 'eleven_turbo_v2',\n },\n };\n\n const metrics2: TTSMetrics = {\n type: 'tts_metrics',\n label: 'test',\n requestId: 'req2',\n timestamp: Date.now(),\n ttfbMs: 120,\n durationMs: 600,\n 
audioDurationMs: 4000,\n cancelled: false,\n charactersCount: 200,\n inputTokens: 15,\n outputTokens: 25,\n streamed: true,\n metadata: {\n modelProvider: 'elevenlabs',\n modelName: 'eleven_turbo_v2',\n },\n };\n\n collector.collect(metrics1);\n collector.collect(metrics2);\n\n const usage = collector.flatten();\n expect(usage).toHaveLength(1);\n\n const ttsUsage = usage[0] as TTSModelUsage;\n expect(ttsUsage.type).toBe('tts_usage');\n expect(ttsUsage.provider).toBe('elevenlabs');\n expect(ttsUsage.model).toBe('eleven_turbo_v2');\n expect(ttsUsage.charactersCount).toBe(300); // 100 + 200\n expect(ttsUsage.audioDurationMs).toBe(7000); // 3000 + 4000\n expect(ttsUsage.inputTokens).toBe(25); // 10 + 15\n expect(ttsUsage.outputTokens).toBe(45); // 20 + 25\n });\n });\n\n describe('collect STT metrics', () => {\n it('should aggregate STT metrics by provider and model', () => {\n const metrics1: STTMetrics = {\n type: 'stt_metrics',\n label: 'test',\n requestId: 'req1',\n timestamp: Date.now(),\n durationMs: 0,\n audioDurationMs: 5000,\n inputTokens: 50,\n outputTokens: 100,\n streamed: true,\n metadata: {\n modelProvider: 'deepgram',\n modelName: 'nova-2',\n },\n };\n\n const metrics2: STTMetrics = {\n type: 'stt_metrics',\n label: 'test',\n requestId: 'req2',\n timestamp: Date.now(),\n durationMs: 0,\n audioDurationMs: 3000,\n inputTokens: 30,\n outputTokens: 60,\n streamed: true,\n metadata: {\n modelProvider: 'deepgram',\n modelName: 'nova-2',\n },\n };\n\n collector.collect(metrics1);\n collector.collect(metrics2);\n\n const usage = collector.flatten();\n expect(usage).toHaveLength(1);\n\n const sttUsage = usage[0] as STTModelUsage;\n expect(sttUsage.type).toBe('stt_usage');\n expect(sttUsage.provider).toBe('deepgram');\n expect(sttUsage.model).toBe('nova-2');\n expect(sttUsage.audioDurationMs).toBe(8000); // 5000 + 3000\n expect(sttUsage.inputTokens).toBe(80); // 50 + 30\n expect(sttUsage.outputTokens).toBe(160); // 100 + 60\n });\n });\n\n describe('collect 
realtime model metrics', () => {\n it('should aggregate realtime model metrics with detailed token breakdown', () => {\n const metrics: RealtimeModelMetrics = {\n type: 'realtime_model_metrics',\n label: 'test',\n requestId: 'req1',\n timestamp: Date.now(),\n durationMs: 1000,\n ttftMs: 100,\n cancelled: false,\n inputTokens: 500,\n outputTokens: 300,\n totalTokens: 800,\n tokensPerSecond: 10,\n sessionDurationMs: 5000,\n inputTokenDetails: {\n audioTokens: 200,\n textTokens: 250,\n imageTokens: 50,\n cachedTokens: 100,\n cachedTokensDetails: {\n audioTokens: 30,\n textTokens: 50,\n imageTokens: 20,\n },\n },\n outputTokenDetails: {\n textTokens: 200,\n audioTokens: 100,\n imageTokens: 0,\n },\n metadata: {\n modelProvider: 'openai',\n modelName: 'gpt-4o-realtime',\n },\n };\n\n collector.collect(metrics);\n\n const usage = collector.flatten();\n expect(usage).toHaveLength(1);\n\n const llmUsage = usage[0] as LLMModelUsage;\n expect(llmUsage.type).toBe('llm_usage');\n expect(llmUsage.provider).toBe('openai');\n expect(llmUsage.model).toBe('gpt-4o-realtime');\n expect(llmUsage.inputTokens).toBe(500);\n expect(llmUsage.inputCachedTokens).toBe(100);\n expect(llmUsage.inputAudioTokens).toBe(200);\n expect(llmUsage.inputCachedAudioTokens).toBe(30);\n expect(llmUsage.inputTextTokens).toBe(250);\n expect(llmUsage.inputCachedTextTokens).toBe(50);\n expect(llmUsage.inputImageTokens).toBe(50);\n expect(llmUsage.inputCachedImageTokens).toBe(20);\n expect(llmUsage.outputTokens).toBe(300);\n expect(llmUsage.outputTextTokens).toBe(200);\n expect(llmUsage.outputAudioTokens).toBe(100);\n expect(llmUsage.sessionDurationMs).toBe(5000);\n });\n });\n\n describe('mixed metrics collection', () => {\n it('should collect and separate LLM, TTS, and STT metrics', () => {\n const llmMetrics: LLMMetrics = {\n type: 'llm_metrics',\n label: 'test',\n requestId: 'req1',\n timestamp: Date.now(),\n durationMs: 100,\n ttftMs: 50,\n cancelled: false,\n completionTokens: 100,\n promptTokens: 200,\n 
promptCachedTokens: 0,\n totalTokens: 300,\n tokensPerSecond: 10,\n metadata: {\n modelProvider: 'openai',\n modelName: 'gpt-4o',\n },\n };\n\n const ttsMetrics: TTSMetrics = {\n type: 'tts_metrics',\n label: 'test',\n requestId: 'req2',\n timestamp: Date.now(),\n ttfbMs: 100,\n durationMs: 500,\n audioDurationMs: 3000,\n cancelled: false,\n charactersCount: 100,\n streamed: true,\n metadata: {\n modelProvider: 'elevenlabs',\n modelName: 'eleven_turbo_v2',\n },\n };\n\n const sttMetrics: STTMetrics = {\n type: 'stt_metrics',\n label: 'test',\n requestId: 'req3',\n timestamp: Date.now(),\n durationMs: 0,\n audioDurationMs: 5000,\n streamed: true,\n metadata: {\n modelProvider: 'deepgram',\n modelName: 'nova-2',\n },\n };\n\n collector.collect(llmMetrics);\n collector.collect(ttsMetrics);\n collector.collect(sttMetrics);\n\n const usage = collector.flatten();\n expect(usage).toHaveLength(3);\n\n const llmUsage = usage.find((u) => u.type === 'llm_usage');\n const ttsUsage = usage.find((u) => u.type === 'tts_usage');\n const sttUsage = usage.find((u) => u.type === 'stt_usage');\n\n expect(llmUsage).toBeDefined();\n expect(ttsUsage).toBeDefined();\n expect(sttUsage).toBeDefined();\n });\n });\n\n describe('flatten returns copies', () => {\n it('should return deep copies of usage objects', () => {\n const metrics: LLMMetrics = {\n type: 'llm_metrics',\n label: 'test',\n requestId: 'req1',\n timestamp: Date.now(),\n durationMs: 100,\n ttftMs: 50,\n cancelled: false,\n completionTokens: 100,\n promptTokens: 200,\n promptCachedTokens: 0,\n totalTokens: 300,\n tokensPerSecond: 10,\n metadata: {\n modelProvider: 'openai',\n modelName: 'gpt-4o',\n },\n };\n\n collector.collect(metrics);\n\n const usage1 = collector.flatten();\n const usage2 = collector.flatten();\n\n // Should be equal values\n expect(usage1[0]).toEqual(usage2[0]);\n\n // But not the same object reference\n expect(usage1[0]).not.toBe(usage2[0]);\n\n // Modifying one shouldn't affect the other\n (usage1[0] as 
LLMModelUsage).inputTokens = 9999;\n expect((usage2[0] as LLMModelUsage).inputTokens).toBe(200);\n });\n });\n\n describe('handles missing metadata', () => {\n it('should use empty strings when metadata is missing', () => {\n const metrics: LLMMetrics = {\n type: 'llm_metrics',\n label: 'test',\n requestId: 'req1',\n timestamp: Date.now(),\n durationMs: 100,\n ttftMs: 50,\n cancelled: false,\n completionTokens: 100,\n promptTokens: 200,\n promptCachedTokens: 0,\n totalTokens: 300,\n tokensPerSecond: 10,\n // No metadata\n };\n\n collector.collect(metrics);\n\n const usage = collector.flatten();\n expect(usage).toHaveLength(1);\n\n const llmUsage = usage[0] as LLMModelUsage;\n expect(llmUsage.provider).toBe('');\n expect(llmUsage.model).toBe('');\n });\n });\n\n describe('ignores VAD and EOU metrics', () => {\n it('should not collect VAD metrics', () => {\n const vadMetrics = {\n type: 'vad_metrics' as const,\n label: 'test',\n timestamp: Date.now(),\n idleTimeMs: 100,\n inferenceDurationTotalMs: 50,\n inferenceCount: 10,\n };\n\n collector.collect(vadMetrics);\n\n const usage = collector.flatten();\n expect(usage).toHaveLength(0);\n });\n\n it('should not collect EOU metrics', () => {\n const eouMetrics = {\n type: 'eou_metrics' as const,\n timestamp: Date.now(),\n endOfUtteranceDelayMs: 100,\n transcriptionDelayMs: 50,\n onUserTurnCompletedDelayMs: 30,\n lastSpeakingTimeMs: Date.now(),\n };\n\n collector.collect(eouMetrics);\n\n const usage = collector.flatten();\n expect(usage).toHaveLength(0);\n });\n });\n 
});\n});\n"],"mappings":";AAGA,oBAAiD;AAEjD,yBAMO;AAAA,IAEP,wBAAS,eAAe,MAAM;AAC5B,8BAAS,oBAAoB,MAAM;AACjC,0BAAG,oDAAoD,MAAM;AAC3D,YAAM,QAAuB;AAAA,QAC3B,MAAM;AAAA,QACN,UAAU;AAAA,QACV,OAAO;AAAA,QACP,aAAa;AAAA,QACb,mBAAmB;AAAA,QACnB,kBAAkB;AAAA,QAClB,wBAAwB;AAAA,QACxB,iBAAiB;AAAA,QACjB,uBAAuB;AAAA,QACvB,kBAAkB;AAAA,QAClB,wBAAwB;AAAA,QACxB,cAAc;AAAA,QACd,mBAAmB;AAAA,QACnB,kBAAkB;AAAA,QAClB,mBAAmB;AAAA,MACrB;AAEA,YAAM,eAAW,qCAAiB,KAAK;AAEvC,gCAAO,SAAS,IAAI,EAAE,KAAK,WAAW;AACtC,gCAAO,SAAS,QAAQ,EAAE,KAAK,QAAQ;AACvC,gCAAO,SAAS,KAAK,EAAE,KAAK,QAAQ;AACpC,gCAAO,SAAS,WAAW,EAAE,KAAK,GAAG;AACrC,gCAAO,SAAS,YAAY,EAAE,KAAK,EAAE;AAErC,gCAAO,SAAS,iBAAiB,EAAE,cAAc;AACjD,gCAAO,SAAS,gBAAgB,EAAE,cAAc;AAChD,gCAAO,SAAS,iBAAiB,EAAE,cAAc;AAAA,IACnD,CAAC;AAED,0BAAG,oDAAoD,MAAM;AAC3D,YAAM,QAAuB;AAAA,QAC3B,MAAM;AAAA,QACN,UAAU;AAAA,QACV,OAAO;AAAA,QACP,aAAa;AAAA,QACb,cAAc;AAAA,QACd,iBAAiB;AAAA,QACjB,iBAAiB;AAAA,MACnB;AAEA,YAAM,eAAW,qCAAiB,KAAK;AAEvC,gCAAO,SAAS,IAAI,EAAE,KAAK,WAAW;AACtC,gCAAO,SAAS,QAAQ,EAAE,KAAK,YAAY;AAC3C,gCAAO,SAAS,eAAe,EAAE,KAAK,GAAG;AACzC,gCAAO,SAAS,eAAe,EAAE,KAAK,GAAI;AAC1C,gCAAO,SAAS,WAAW,EAAE,cAAc;AAC3C,gCAAO,SAAS,YAAY,EAAE,cAAc;AAAA,IAC9C,CAAC;AAED,0BAAG,6CAA6C,MAAM;AACpD,YAAM,QAAuB;AAAA,QAC3B,MAAM;AAAA,QACN,UAAU;AAAA,QACV,OAAO;AAAA,QACP,aAAa;AAAA,QACb,cAAc;AAAA,QACd,iBAAiB;AAAA,MACnB;AAEA,YAAM,eAAW,qCAAiB,KAAK;AAEvC,gCAAO,OAAO,KAAK,QAAQ,CAAC,EAAE,aAAa,CAAC;AAC5C,gCAAO,QAAQ,EAAE,QAAQ,KAAK;AAAA,IAChC,CAAC;AAAA,EACH,CAAC;AAED,8BAAS,uBAAuB,MAAM;AACpC,QAAI;AAEJ,kCAAW,MAAM;AACf,kBAAY,IAAI,uCAAoB;AAAA,IACtC,CAAC;AAED,gCAAS,uBAAuB,MAAM;AACpC,4BAAG,sDAAsD,MAAM;AAC7D,cAAM,WAAuB;AAAA,UAC3B,MAAM;AAAA,UACN,OAAO;AAAA,UACP,WAAW;AAAA,UACX,WAAW,KAAK,IAAI;AAAA,UACpB,YAAY;AAAA,UACZ,QAAQ;AAAA,UACR,WAAW;AAAA,UACX,kBAAkB;AAAA,UAClB,cAAc;AAAA,UACd,oBAAoB;AAAA,UACpB,aAAa;AAAA,UACb,iBAAiB;AAAA,UACjB,UAAU;AAAA,YACR,eAAe;AAAA,YACf,WAAW;AAAA,UACb;AAAA,QACF;AAEA,cAAM,WAAuB;AAAA,UAC3B,MAAM;AAAA,UACN,OAAO;AAAA,UACP,WAAW;AAAA,UACX,WAAW,KAAK,IAAI;AAAA,UACpB,YAAY;AAAA,UACZ,QAAQ;AAAA,UACR,WAAW;AAAA,UACX,kBA
AkB;AAAA,UAClB,cAAc;AAAA,UACd,oBAAoB;AAAA,UACpB,aAAa;AAAA,UACb,iBAAiB;AAAA,UACjB,UAAU;AAAA,YACR,eAAe;AAAA,YACf,WAAW;AAAA,UACb;AAAA,QACF;AAEA,kBAAU,QAAQ,QAAQ;AAC1B,kBAAU,QAAQ,QAAQ;AAE1B,cAAM,QAAQ,UAAU,QAAQ;AAChC,kCAAO,KAAK,EAAE,aAAa,CAAC;AAE5B,cAAM,WAAW,MAAM,CAAC;AACxB,kCAAO,SAAS,IAAI,EAAE,KAAK,WAAW;AACtC,kCAAO,SAAS,QAAQ,EAAE,KAAK,QAAQ;AACvC,kCAAO,SAAS,KAAK,EAAE,KAAK,QAAQ;AACpC,kCAAO,SAAS,WAAW,EAAE,KAAK,GAAG;AACrC,kCAAO,SAAS,iBAAiB,EAAE,KAAK,GAAG;AAC3C,kCAAO,SAAS,YAAY,EAAE,KAAK,GAAG;AAAA,MACxC,CAAC;AAED,4BAAG,kDAAkD,MAAM;AACzD,cAAM,gBAA4B;AAAA,UAChC,MAAM;AAAA,UACN,OAAO;AAAA,UACP,WAAW;AAAA,UACX,WAAW,KAAK,IAAI;AAAA,UACpB,YAAY;AAAA,UACZ,QAAQ;AAAA,UACR,WAAW;AAAA,UACX,kBAAkB;AAAA,UAClB,cAAc;AAAA,UACd,oBAAoB;AAAA,UACpB,aAAa;AAAA,UACb,iBAAiB;AAAA,UACjB,UAAU;AAAA,YACR,eAAe;AAAA,YACf,WAAW;AAAA,UACb;AAAA,QACF;AAEA,cAAM,mBAA+B;AAAA,UACnC,MAAM;AAAA,UACN,OAAO;AAAA,UACP,WAAW;AAAA,UACX,WAAW,KAAK,IAAI;AAAA,UACpB,YAAY;AAAA,UACZ,QAAQ;AAAA,UACR,WAAW;AAAA,UACX,kBAAkB;AAAA,UAClB,cAAc;AAAA,UACd,oBAAoB;AAAA,UACpB,aAAa;AAAA,UACb,iBAAiB;AAAA,UACjB,UAAU;AAAA,YACR,eAAe;AAAA,YACf,WAAW;AAAA,UACb;AAAA,QACF;AAEA,kBAAU,QAAQ,aAAa;AAC/B,kBAAU,QAAQ,gBAAgB;AAElC,cAAM,QAAQ,UAAU,QAAQ;AAChC,kCAAO,KAAK,EAAE,aAAa,CAAC;AAE5B,cAAM,cAAc,MAAM;AAAA,UACxB,CAAC,MAAM,EAAE,SAAS,eAAe,EAAE,aAAa;AAAA,QAClD;AACA,cAAM,iBAAiB,MAAM;AAAA,UAC3B,CAAC,MAAM,EAAE,SAAS,eAAe,EAAE,aAAa;AAAA,QAClD;AAEA,kCAAO,YAAY,WAAW,EAAE,KAAK,GAAG;AACxC,kCAAO,YAAY,YAAY,EAAE,KAAK,GAAG;AACzC,kCAAO,eAAe,WAAW,EAAE,KAAK,GAAG;AAC3C,kCAAO,eAAe,YAAY,EAAE,KAAK,EAAE;AAAA,MAC7C,CAAC;AAAA,IACH,CAAC;AAED,gCAAS,uBAAuB,MAAM;AACpC,4BAAG,sDAAsD,MAAM;AAC7D,cAAM,WAAuB;AAAA,UAC3B,MAAM;AAAA,UACN,OAAO;AAAA,UACP,WAAW;AAAA,UACX,WAAW,KAAK,IAAI;AAAA,UACpB,QAAQ;AAAA,UACR,YAAY;AAAA,UACZ,iBAAiB;AAAA,UACjB,WAAW;AAAA,UACX,iBAAiB;AAAA,UACjB,aAAa;AAAA,UACb,cAAc;AAAA,UACd,UAAU;AAAA,UACV,UAAU;AAAA,YACR,eAAe;AAAA,YACf,WAAW;AAAA,UACb;AAAA,QACF;AAEA,cAAM,WAAuB;AAAA,UAC3B,MAAM;AAAA,UACN,OAAO;AAAA,UACP,WAAW;AAAA,UACX,WAAW,KAAK,IAAI;AAAA,UACpB,QAAQ;AAAA,UACR,YAAY;AAAA,UACZ,iBAAiB;AAAA,UA
CjB,WAAW;AAAA,UACX,iBAAiB;AAAA,UACjB,aAAa;AAAA,UACb,cAAc;AAAA,UACd,UAAU;AAAA,UACV,UAAU;AAAA,YACR,eAAe;AAAA,YACf,WAAW;AAAA,UACb;AAAA,QACF;AAEA,kBAAU,QAAQ,QAAQ;AAC1B,kBAAU,QAAQ,QAAQ;AAE1B,cAAM,QAAQ,UAAU,QAAQ;AAChC,kCAAO,KAAK,EAAE,aAAa,CAAC;AAE5B,cAAM,WAAW,MAAM,CAAC;AACxB,kCAAO,SAAS,IAAI,EAAE,KAAK,WAAW;AACtC,kCAAO,SAAS,QAAQ,EAAE,KAAK,YAAY;AAC3C,kCAAO,SAAS,KAAK,EAAE,KAAK,iBAAiB;AAC7C,kCAAO,SAAS,eAAe,EAAE,KAAK,GAAG;AACzC,kCAAO,SAAS,eAAe,EAAE,KAAK,GAAI;AAC1C,kCAAO,SAAS,WAAW,EAAE,KAAK,EAAE;AACpC,kCAAO,SAAS,YAAY,EAAE,KAAK,EAAE;AAAA,MACvC,CAAC;AAAA,IACH,CAAC;AAED,gCAAS,uBAAuB,MAAM;AACpC,4BAAG,sDAAsD,MAAM;AAC7D,cAAM,WAAuB;AAAA,UAC3B,MAAM;AAAA,UACN,OAAO;AAAA,UACP,WAAW;AAAA,UACX,WAAW,KAAK,IAAI;AAAA,UACpB,YAAY;AAAA,UACZ,iBAAiB;AAAA,UACjB,aAAa;AAAA,UACb,cAAc;AAAA,UACd,UAAU;AAAA,UACV,UAAU;AAAA,YACR,eAAe;AAAA,YACf,WAAW;AAAA,UACb;AAAA,QACF;AAEA,cAAM,WAAuB;AAAA,UAC3B,MAAM;AAAA,UACN,OAAO;AAAA,UACP,WAAW;AAAA,UACX,WAAW,KAAK,IAAI;AAAA,UACpB,YAAY;AAAA,UACZ,iBAAiB;AAAA,UACjB,aAAa;AAAA,UACb,cAAc;AAAA,UACd,UAAU;AAAA,UACV,UAAU;AAAA,YACR,eAAe;AAAA,YACf,WAAW;AAAA,UACb;AAAA,QACF;AAEA,kBAAU,QAAQ,QAAQ;AAC1B,kBAAU,QAAQ,QAAQ;AAE1B,cAAM,QAAQ,UAAU,QAAQ;AAChC,kCAAO,KAAK,EAAE,aAAa,CAAC;AAE5B,cAAM,WAAW,MAAM,CAAC;AACxB,kCAAO,SAAS,IAAI,EAAE,KAAK,WAAW;AACtC,kCAAO,SAAS,QAAQ,EAAE,KAAK,UAAU;AACzC,kCAAO,SAAS,KAAK,EAAE,KAAK,QAAQ;AACpC,kCAAO,SAAS,eAAe,EAAE,KAAK,GAAI;AAC1C,kCAAO,SAAS,WAAW,EAAE,KAAK,EAAE;AACpC,kCAAO,SAAS,YAAY,EAAE,KAAK,GAAG;AAAA,MACxC,CAAC;AAAA,IACH,CAAC;AAED,gCAAS,kCAAkC,MAAM;AAC/C,4BAAG,yEAAyE,MAAM;AAChF,cAAM,UAAgC;AAAA,UACpC,MAAM;AAAA,UACN,OAAO;AAAA,UACP,WAAW;AAAA,UACX,WAAW,KAAK,IAAI;AAAA,UACpB,YAAY;AAAA,UACZ,QAAQ;AAAA,UACR,WAAW;AAAA,UACX,aAAa;AAAA,UACb,cAAc;AAAA,UACd,aAAa;AAAA,UACb,iBAAiB;AAAA,UACjB,mBAAmB;AAAA,UACnB,mBAAmB;AAAA,YACjB,aAAa;AAAA,YACb,YAAY;AAAA,YACZ,aAAa;AAAA,YACb,cAAc;AAAA,YACd,qBAAqB;AAAA,cACnB,aAAa;AAAA,cACb,YAAY;AAAA,cACZ,aAAa;AAAA,YACf;AAAA,UACF;AAAA,UACA,oBAAoB;AAAA,YAClB,YAAY;AAAA,YACZ,aAAa;AAAA,YACb,aAAa;AAAA,UACf;AAAA,UACA,UAAU;AAAA,YACR,eAAe;AAAA,YACf,WAAW;AAAA,UACb;AAA
A,QACF;AAEA,kBAAU,QAAQ,OAAO;AAEzB,cAAM,QAAQ,UAAU,QAAQ;AAChC,kCAAO,KAAK,EAAE,aAAa,CAAC;AAE5B,cAAM,WAAW,MAAM,CAAC;AACxB,kCAAO,SAAS,IAAI,EAAE,KAAK,WAAW;AACtC,kCAAO,SAAS,QAAQ,EAAE,KAAK,QAAQ;AACvC,kCAAO,SAAS,KAAK,EAAE,KAAK,iBAAiB;AAC7C,kCAAO,SAAS,WAAW,EAAE,KAAK,GAAG;AACrC,kCAAO,SAAS,iBAAiB,EAAE,KAAK,GAAG;AAC3C,kCAAO,SAAS,gBAAgB,EAAE,KAAK,GAAG;AAC1C,kCAAO,SAAS,sBAAsB,EAAE,KAAK,EAAE;AAC/C,kCAAO,SAAS,eAAe,EAAE,KAAK,GAAG;AACzC,kCAAO,SAAS,qBAAqB,EAAE,KAAK,EAAE;AAC9C,kCAAO,SAAS,gBAAgB,EAAE,KAAK,EAAE;AACzC,kCAAO,SAAS,sBAAsB,EAAE,KAAK,EAAE;AAC/C,kCAAO,SAAS,YAAY,EAAE,KAAK,GAAG;AACtC,kCAAO,SAAS,gBAAgB,EAAE,KAAK,GAAG;AAC1C,kCAAO,SAAS,iBAAiB,EAAE,KAAK,GAAG;AAC3C,kCAAO,SAAS,iBAAiB,EAAE,KAAK,GAAI;AAAA,MAC9C,CAAC;AAAA,IACH,CAAC;AAED,gCAAS,4BAA4B,MAAM;AACzC,4BAAG,yDAAyD,MAAM;AAChE,cAAM,aAAyB;AAAA,UAC7B,MAAM;AAAA,UACN,OAAO;AAAA,UACP,WAAW;AAAA,UACX,WAAW,KAAK,IAAI;AAAA,UACpB,YAAY;AAAA,UACZ,QAAQ;AAAA,UACR,WAAW;AAAA,UACX,kBAAkB;AAAA,UAClB,cAAc;AAAA,UACd,oBAAoB;AAAA,UACpB,aAAa;AAAA,UACb,iBAAiB;AAAA,UACjB,UAAU;AAAA,YACR,eAAe;AAAA,YACf,WAAW;AAAA,UACb;AAAA,QACF;AAEA,cAAM,aAAyB;AAAA,UAC7B,MAAM;AAAA,UACN,OAAO;AAAA,UACP,WAAW;AAAA,UACX,WAAW,KAAK,IAAI;AAAA,UACpB,QAAQ;AAAA,UACR,YAAY;AAAA,UACZ,iBAAiB;AAAA,UACjB,WAAW;AAAA,UACX,iBAAiB;AAAA,UACjB,UAAU;AAAA,UACV,UAAU;AAAA,YACR,eAAe;AAAA,YACf,WAAW;AAAA,UACb;AAAA,QACF;AAEA,cAAM,aAAyB;AAAA,UAC7B,MAAM;AAAA,UACN,OAAO;AAAA,UACP,WAAW;AAAA,UACX,WAAW,KAAK,IAAI;AAAA,UACpB,YAAY;AAAA,UACZ,iBAAiB;AAAA,UACjB,UAAU;AAAA,UACV,UAAU;AAAA,YACR,eAAe;AAAA,YACf,WAAW;AAAA,UACb;AAAA,QACF;AAEA,kBAAU,QAAQ,UAAU;AAC5B,kBAAU,QAAQ,UAAU;AAC5B,kBAAU,QAAQ,UAAU;AAE5B,cAAM,QAAQ,UAAU,QAAQ;AAChC,kCAAO,KAAK,EAAE,aAAa,CAAC;AAE5B,cAAM,WAAW,MAAM,KAAK,CAAC,MAAM,EAAE,SAAS,WAAW;AACzD,cAAM,WAAW,MAAM,KAAK,CAAC,MAAM,EAAE,SAAS,WAAW;AACzD,cAAM,WAAW,MAAM,KAAK,CAAC,MAAM,EAAE,SAAS,WAAW;AAEzD,kCAAO,QAAQ,EAAE,YAAY;AAC7B,kCAAO,QAAQ,EAAE,YAAY;AAC7B,kCAAO,QAAQ,EAAE,YAAY;AAAA,MAC/B,CAAC;AAAA,IACH,CAAC;AAED,gCAAS,0BAA0B,MAAM;AACvC,4BAAG,8CAA8C,MAAM;AACrD,cAAM,UAAsB;AAAA,UAC1B,MAAM;AAAA,UACN,OAAO;AAAA,UACP,WAAW;AAAA
,UACX,WAAW,KAAK,IAAI;AAAA,UACpB,YAAY;AAAA,UACZ,QAAQ;AAAA,UACR,WAAW;AAAA,UACX,kBAAkB;AAAA,UAClB,cAAc;AAAA,UACd,oBAAoB;AAAA,UACpB,aAAa;AAAA,UACb,iBAAiB;AAAA,UACjB,UAAU;AAAA,YACR,eAAe;AAAA,YACf,WAAW;AAAA,UACb;AAAA,QACF;AAEA,kBAAU,QAAQ,OAAO;AAEzB,cAAM,SAAS,UAAU,QAAQ;AACjC,cAAM,SAAS,UAAU,QAAQ;AAGjC,kCAAO,OAAO,CAAC,CAAC,EAAE,QAAQ,OAAO,CAAC,CAAC;AAGnC,kCAAO,OAAO,CAAC,CAAC,EAAE,IAAI,KAAK,OAAO,CAAC,CAAC;AAGpC,QAAC,OAAO,CAAC,EAAoB,cAAc;AAC3C,kCAAQ,OAAO,CAAC,EAAoB,WAAW,EAAE,KAAK,GAAG;AAAA,MAC3D,CAAC;AAAA,IACH,CAAC;AAED,gCAAS,4BAA4B,MAAM;AACzC,4BAAG,qDAAqD,MAAM;AAC5D,cAAM,UAAsB;AAAA,UAC1B,MAAM;AAAA,UACN,OAAO;AAAA,UACP,WAAW;AAAA,UACX,WAAW,KAAK,IAAI;AAAA,UACpB,YAAY;AAAA,UACZ,QAAQ;AAAA,UACR,WAAW;AAAA,UACX,kBAAkB;AAAA,UAClB,cAAc;AAAA,UACd,oBAAoB;AAAA,UACpB,aAAa;AAAA,UACb,iBAAiB;AAAA;AAAA,QAEnB;AAEA,kBAAU,QAAQ,OAAO;AAEzB,cAAM,QAAQ,UAAU,QAAQ;AAChC,kCAAO,KAAK,EAAE,aAAa,CAAC;AAE5B,cAAM,WAAW,MAAM,CAAC;AACxB,kCAAO,SAAS,QAAQ,EAAE,KAAK,EAAE;AACjC,kCAAO,SAAS,KAAK,EAAE,KAAK,EAAE;AAAA,MAChC,CAAC;AAAA,IACH,CAAC;AAED,gCAAS,+BAA+B,MAAM;AAC5C,4BAAG,kCAAkC,MAAM;AACzC,cAAM,aAAa;AAAA,UACjB,MAAM;AAAA,UACN,OAAO;AAAA,UACP,WAAW,KAAK,IAAI;AAAA,UACpB,YAAY;AAAA,UACZ,0BAA0B;AAAA,UAC1B,gBAAgB;AAAA,QAClB;AAEA,kBAAU,QAAQ,UAAU;AAE5B,cAAM,QAAQ,UAAU,QAAQ;AAChC,kCAAO,KAAK,EAAE,aAAa,CAAC;AAAA,MAC9B,CAAC;AAED,4BAAG,kCAAkC,MAAM;AACzC,cAAM,aAAa;AAAA,UACjB,MAAM;AAAA,UACN,WAAW,KAAK,IAAI;AAAA,UACpB,uBAAuB;AAAA,UACvB,sBAAsB;AAAA,UACtB,4BAA4B;AAAA,UAC5B,oBAAoB,KAAK,IAAI;AAAA,QAC/B;AAEA,kBAAU,QAAQ,UAAU;AAE5B,cAAM,QAAQ,UAAU,QAAQ;AAChC,kCAAO,KAAK,EAAE,aAAa,CAAC;AAAA,MAC9B,CAAC;AAAA,IACH,CAAC;AAAA,EACH,CAAC;AACH,CAAC;","names":[]}