@livekit/agents 1.0.48 → 1.1.0-dev.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (373)
  1. package/dist/constants.cjs +27 -0
  2. package/dist/constants.cjs.map +1 -1
  3. package/dist/constants.d.cts +9 -0
  4. package/dist/constants.d.ts +9 -0
  5. package/dist/constants.d.ts.map +1 -1
  6. package/dist/constants.js +18 -0
  7. package/dist/constants.js.map +1 -1
  8. package/dist/inference/api_protos.d.cts +71 -71
  9. package/dist/inference/api_protos.d.ts +71 -71
  10. package/dist/inference/interruption/defaults.cjs +81 -0
  11. package/dist/inference/interruption/defaults.cjs.map +1 -0
  12. package/dist/inference/interruption/defaults.d.cts +19 -0
  13. package/dist/inference/interruption/defaults.d.ts +19 -0
  14. package/dist/inference/interruption/defaults.d.ts.map +1 -0
  15. package/dist/inference/interruption/defaults.js +46 -0
  16. package/dist/inference/interruption/defaults.js.map +1 -0
  17. package/dist/inference/interruption/errors.cjs +44 -0
  18. package/dist/inference/interruption/errors.cjs.map +1 -0
  19. package/dist/inference/interruption/errors.d.cts +12 -0
  20. package/dist/inference/interruption/errors.d.ts +12 -0
  21. package/dist/inference/interruption/errors.d.ts.map +1 -0
  22. package/dist/inference/interruption/errors.js +20 -0
  23. package/dist/inference/interruption/errors.js.map +1 -0
  24. package/dist/inference/interruption/http_transport.cjs +147 -0
  25. package/dist/inference/interruption/http_transport.cjs.map +1 -0
  26. package/dist/inference/interruption/http_transport.d.cts +63 -0
  27. package/dist/inference/interruption/http_transport.d.ts +63 -0
  28. package/dist/inference/interruption/http_transport.d.ts.map +1 -0
  29. package/dist/inference/interruption/http_transport.js +121 -0
  30. package/dist/inference/interruption/http_transport.js.map +1 -0
  31. package/dist/inference/interruption/interruption_cache_entry.cjs +58 -0
  32. package/dist/inference/interruption/interruption_cache_entry.cjs.map +1 -0
  33. package/dist/inference/interruption/interruption_cache_entry.d.cts +30 -0
  34. package/dist/inference/interruption/interruption_cache_entry.d.ts +30 -0
  35. package/dist/inference/interruption/interruption_cache_entry.d.ts.map +1 -0
  36. package/dist/inference/interruption/interruption_cache_entry.js +34 -0
  37. package/dist/inference/interruption/interruption_cache_entry.js.map +1 -0
  38. package/dist/inference/interruption/interruption_detector.cjs +181 -0
  39. package/dist/inference/interruption/interruption_detector.cjs.map +1 -0
  40. package/dist/inference/interruption/interruption_detector.d.cts +59 -0
  41. package/dist/inference/interruption/interruption_detector.d.ts +59 -0
  42. package/dist/inference/interruption/interruption_detector.d.ts.map +1 -0
  43. package/dist/inference/interruption/interruption_detector.js +147 -0
  44. package/dist/inference/interruption/interruption_detector.js.map +1 -0
  45. package/dist/inference/interruption/interruption_stream.cjs +368 -0
  46. package/dist/inference/interruption/interruption_stream.cjs.map +1 -0
  47. package/dist/inference/interruption/interruption_stream.d.cts +46 -0
  48. package/dist/inference/interruption/interruption_stream.d.ts +46 -0
  49. package/dist/inference/interruption/interruption_stream.d.ts.map +1 -0
  50. package/dist/inference/interruption/interruption_stream.js +344 -0
  51. package/dist/inference/interruption/interruption_stream.js.map +1 -0
  52. package/dist/inference/interruption/types.cjs +17 -0
  53. package/dist/inference/interruption/types.cjs.map +1 -0
  54. package/dist/inference/interruption/types.d.cts +66 -0
  55. package/dist/inference/interruption/types.d.ts +66 -0
  56. package/dist/inference/interruption/types.d.ts.map +1 -0
  57. package/dist/inference/interruption/types.js +1 -0
  58. package/dist/inference/interruption/types.js.map +1 -0
  59. package/dist/inference/interruption/utils.cjs +130 -0
  60. package/dist/inference/interruption/utils.cjs.map +1 -0
  61. package/dist/inference/interruption/utils.d.cts +41 -0
  62. package/dist/inference/interruption/utils.d.ts +41 -0
  63. package/dist/inference/interruption/utils.d.ts.map +1 -0
  64. package/dist/inference/interruption/utils.js +105 -0
  65. package/dist/inference/interruption/utils.js.map +1 -0
  66. package/dist/inference/interruption/utils.test.cjs +105 -0
  67. package/dist/inference/interruption/utils.test.cjs.map +1 -0
  68. package/dist/inference/interruption/utils.test.js +104 -0
  69. package/dist/inference/interruption/utils.test.js.map +1 -0
  70. package/dist/inference/interruption/ws_transport.cjs +329 -0
  71. package/dist/inference/interruption/ws_transport.cjs.map +1 -0
  72. package/dist/inference/interruption/ws_transport.d.cts +33 -0
  73. package/dist/inference/interruption/ws_transport.d.ts +33 -0
  74. package/dist/inference/interruption/ws_transport.d.ts.map +1 -0
  75. package/dist/inference/interruption/ws_transport.js +295 -0
  76. package/dist/inference/interruption/ws_transport.js.map +1 -0
  77. package/dist/inference/llm.cjs +14 -10
  78. package/dist/inference/llm.cjs.map +1 -1
  79. package/dist/inference/llm.d.cts +2 -1
  80. package/dist/inference/llm.d.ts +2 -1
  81. package/dist/inference/llm.d.ts.map +1 -1
  82. package/dist/inference/llm.js +8 -10
  83. package/dist/inference/llm.js.map +1 -1
  84. package/dist/inference/stt.cjs +7 -2
  85. package/dist/inference/stt.cjs.map +1 -1
  86. package/dist/inference/stt.d.cts +2 -0
  87. package/dist/inference/stt.d.ts +2 -0
  88. package/dist/inference/stt.d.ts.map +1 -1
  89. package/dist/inference/stt.js +8 -3
  90. package/dist/inference/stt.js.map +1 -1
  91. package/dist/inference/tts.cjs +7 -2
  92. package/dist/inference/tts.cjs.map +1 -1
  93. package/dist/inference/tts.d.cts +2 -0
  94. package/dist/inference/tts.d.ts +2 -0
  95. package/dist/inference/tts.d.ts.map +1 -1
  96. package/dist/inference/tts.js +8 -3
  97. package/dist/inference/tts.js.map +1 -1
  98. package/dist/inference/utils.cjs +26 -7
  99. package/dist/inference/utils.cjs.map +1 -1
  100. package/dist/inference/utils.d.cts +13 -0
  101. package/dist/inference/utils.d.ts +13 -0
  102. package/dist/inference/utils.d.ts.map +1 -1
  103. package/dist/inference/utils.js +18 -2
  104. package/dist/inference/utils.js.map +1 -1
  105. package/dist/llm/chat_context.cjs +20 -2
  106. package/dist/llm/chat_context.cjs.map +1 -1
  107. package/dist/llm/chat_context.d.cts +19 -1
  108. package/dist/llm/chat_context.d.ts +19 -1
  109. package/dist/llm/chat_context.d.ts.map +1 -1
  110. package/dist/llm/chat_context.js +20 -2
  111. package/dist/llm/chat_context.js.map +1 -1
  112. package/dist/llm/index.cjs.map +1 -1
  113. package/dist/llm/index.d.cts +1 -1
  114. package/dist/llm/index.d.ts +1 -1
  115. package/dist/llm/index.d.ts.map +1 -1
  116. package/dist/llm/index.js.map +1 -1
  117. package/dist/llm/llm.cjs +16 -1
  118. package/dist/llm/llm.cjs.map +1 -1
  119. package/dist/llm/llm.d.cts +9 -0
  120. package/dist/llm/llm.d.ts +9 -0
  121. package/dist/llm/llm.d.ts.map +1 -1
  122. package/dist/llm/llm.js +16 -1
  123. package/dist/llm/llm.js.map +1 -1
  124. package/dist/llm/realtime.cjs +3 -0
  125. package/dist/llm/realtime.cjs.map +1 -1
  126. package/dist/llm/realtime.d.cts +1 -0
  127. package/dist/llm/realtime.d.ts +1 -0
  128. package/dist/llm/realtime.d.ts.map +1 -1
  129. package/dist/llm/realtime.js +3 -0
  130. package/dist/llm/realtime.js.map +1 -1
  131. package/dist/metrics/base.cjs.map +1 -1
  132. package/dist/metrics/base.d.cts +45 -1
  133. package/dist/metrics/base.d.ts +45 -1
  134. package/dist/metrics/base.d.ts.map +1 -1
  135. package/dist/metrics/index.cjs +5 -0
  136. package/dist/metrics/index.cjs.map +1 -1
  137. package/dist/metrics/index.d.cts +2 -1
  138. package/dist/metrics/index.d.ts +2 -1
  139. package/dist/metrics/index.d.ts.map +1 -1
  140. package/dist/metrics/index.js +6 -0
  141. package/dist/metrics/index.js.map +1 -1
  142. package/dist/metrics/model_usage.cjs +189 -0
  143. package/dist/metrics/model_usage.cjs.map +1 -0
  144. package/dist/metrics/model_usage.d.cts +92 -0
  145. package/dist/metrics/model_usage.d.ts +92 -0
  146. package/dist/metrics/model_usage.d.ts.map +1 -0
  147. package/dist/metrics/model_usage.js +164 -0
  148. package/dist/metrics/model_usage.js.map +1 -0
  149. package/dist/metrics/model_usage.test.cjs +474 -0
  150. package/dist/metrics/model_usage.test.cjs.map +1 -0
  151. package/dist/metrics/model_usage.test.js +476 -0
  152. package/dist/metrics/model_usage.test.js.map +1 -0
  153. package/dist/metrics/usage_collector.cjs +3 -0
  154. package/dist/metrics/usage_collector.cjs.map +1 -1
  155. package/dist/metrics/usage_collector.d.cts +9 -0
  156. package/dist/metrics/usage_collector.d.ts +9 -0
  157. package/dist/metrics/usage_collector.d.ts.map +1 -1
  158. package/dist/metrics/usage_collector.js +3 -0
  159. package/dist/metrics/usage_collector.js.map +1 -1
  160. package/dist/metrics/utils.cjs +9 -0
  161. package/dist/metrics/utils.cjs.map +1 -1
  162. package/dist/metrics/utils.d.ts.map +1 -1
  163. package/dist/metrics/utils.js +9 -0
  164. package/dist/metrics/utils.js.map +1 -1
  165. package/dist/stream/multi_input_stream.test.cjs +4 -0
  166. package/dist/stream/multi_input_stream.test.cjs.map +1 -1
  167. package/dist/stream/multi_input_stream.test.js +5 -1
  168. package/dist/stream/multi_input_stream.test.js.map +1 -1
  169. package/dist/stream/stream_channel.cjs +31 -0
  170. package/dist/stream/stream_channel.cjs.map +1 -1
  171. package/dist/stream/stream_channel.d.cts +4 -2
  172. package/dist/stream/stream_channel.d.ts +4 -2
  173. package/dist/stream/stream_channel.d.ts.map +1 -1
  174. package/dist/stream/stream_channel.js +31 -0
  175. package/dist/stream/stream_channel.js.map +1 -1
  176. package/dist/stt/stt.cjs +34 -2
  177. package/dist/stt/stt.cjs.map +1 -1
  178. package/dist/stt/stt.d.cts +22 -0
  179. package/dist/stt/stt.d.ts +22 -0
  180. package/dist/stt/stt.d.ts.map +1 -1
  181. package/dist/stt/stt.js +34 -2
  182. package/dist/stt/stt.js.map +1 -1
  183. package/dist/telemetry/otel_http_exporter.cjs +24 -5
  184. package/dist/telemetry/otel_http_exporter.cjs.map +1 -1
  185. package/dist/telemetry/otel_http_exporter.d.cts +1 -0
  186. package/dist/telemetry/otel_http_exporter.d.ts +1 -0
  187. package/dist/telemetry/otel_http_exporter.d.ts.map +1 -1
  188. package/dist/telemetry/otel_http_exporter.js +24 -5
  189. package/dist/telemetry/otel_http_exporter.js.map +1 -1
  190. package/dist/telemetry/trace_types.cjs +5 -5
  191. package/dist/telemetry/trace_types.cjs.map +1 -1
  192. package/dist/telemetry/trace_types.d.cts +9 -5
  193. package/dist/telemetry/trace_types.d.ts +9 -5
  194. package/dist/telemetry/trace_types.d.ts.map +1 -1
  195. package/dist/telemetry/trace_types.js +5 -5
  196. package/dist/telemetry/trace_types.js.map +1 -1
  197. package/dist/telemetry/traces.cjs +47 -8
  198. package/dist/telemetry/traces.cjs.map +1 -1
  199. package/dist/telemetry/traces.d.ts.map +1 -1
  200. package/dist/telemetry/traces.js +47 -8
  201. package/dist/telemetry/traces.js.map +1 -1
  202. package/dist/tts/tts.cjs +64 -2
  203. package/dist/tts/tts.cjs.map +1 -1
  204. package/dist/tts/tts.d.cts +34 -0
  205. package/dist/tts/tts.d.ts +34 -0
  206. package/dist/tts/tts.d.ts.map +1 -1
  207. package/dist/tts/tts.js +64 -2
  208. package/dist/tts/tts.js.map +1 -1
  209. package/dist/version.cjs +1 -1
  210. package/dist/version.js +1 -1
  211. package/dist/voice/agent.cjs +25 -4
  212. package/dist/voice/agent.cjs.map +1 -1
  213. package/dist/voice/agent.d.cts +10 -2
  214. package/dist/voice/agent.d.ts +10 -2
  215. package/dist/voice/agent.d.ts.map +1 -1
  216. package/dist/voice/agent.js +25 -4
  217. package/dist/voice/agent.js.map +1 -1
  218. package/dist/voice/agent_activity.cjs +261 -36
  219. package/dist/voice/agent_activity.cjs.map +1 -1
  220. package/dist/voice/agent_activity.d.cts +20 -6
  221. package/dist/voice/agent_activity.d.ts +20 -6
  222. package/dist/voice/agent_activity.d.ts.map +1 -1
  223. package/dist/voice/agent_activity.js +262 -37
  224. package/dist/voice/agent_activity.js.map +1 -1
  225. package/dist/voice/agent_session.cjs +105 -48
  226. package/dist/voice/agent_session.cjs.map +1 -1
  227. package/dist/voice/agent_session.d.cts +90 -20
  228. package/dist/voice/agent_session.d.ts +90 -20
  229. package/dist/voice/agent_session.d.ts.map +1 -1
  230. package/dist/voice/agent_session.js +105 -46
  231. package/dist/voice/agent_session.js.map +1 -1
  232. package/dist/voice/audio_recognition.cjs +287 -6
  233. package/dist/voice/audio_recognition.cjs.map +1 -1
  234. package/dist/voice/audio_recognition.d.cts +42 -3
  235. package/dist/voice/audio_recognition.d.ts +42 -3
  236. package/dist/voice/audio_recognition.d.ts.map +1 -1
  237. package/dist/voice/audio_recognition.js +289 -7
  238. package/dist/voice/audio_recognition.js.map +1 -1
  239. package/dist/voice/client_events.cjs +554 -0
  240. package/dist/voice/client_events.cjs.map +1 -0
  241. package/dist/voice/client_events.d.cts +195 -0
  242. package/dist/voice/client_events.d.ts +195 -0
  243. package/dist/voice/client_events.d.ts.map +1 -0
  244. package/dist/voice/client_events.js +548 -0
  245. package/dist/voice/client_events.js.map +1 -0
  246. package/dist/voice/events.cjs +1 -0
  247. package/dist/voice/events.cjs.map +1 -1
  248. package/dist/voice/events.d.cts +8 -5
  249. package/dist/voice/events.d.ts +8 -5
  250. package/dist/voice/events.d.ts.map +1 -1
  251. package/dist/voice/events.js +1 -0
  252. package/dist/voice/events.js.map +1 -1
  253. package/dist/voice/generation.cjs +43 -8
  254. package/dist/voice/generation.cjs.map +1 -1
  255. package/dist/voice/generation.d.cts +3 -3
  256. package/dist/voice/generation.d.ts +3 -3
  257. package/dist/voice/generation.d.ts.map +1 -1
  258. package/dist/voice/generation.js +43 -8
  259. package/dist/voice/generation.js.map +1 -1
  260. package/dist/voice/index.cjs +1 -0
  261. package/dist/voice/index.cjs.map +1 -1
  262. package/dist/voice/index.d.cts +1 -0
  263. package/dist/voice/index.d.ts +1 -0
  264. package/dist/voice/index.d.ts.map +1 -1
  265. package/dist/voice/index.js +1 -0
  266. package/dist/voice/index.js.map +1 -1
  267. package/dist/voice/report.cjs +20 -8
  268. package/dist/voice/report.cjs.map +1 -1
  269. package/dist/voice/report.d.cts +5 -0
  270. package/dist/voice/report.d.ts +5 -0
  271. package/dist/voice/report.d.ts.map +1 -1
  272. package/dist/voice/report.js +20 -8
  273. package/dist/voice/report.js.map +1 -1
  274. package/dist/voice/report.test.cjs +106 -0
  275. package/dist/voice/report.test.cjs.map +1 -0
  276. package/dist/voice/report.test.js +105 -0
  277. package/dist/voice/report.test.js.map +1 -0
  278. package/dist/voice/room_io/room_io.cjs +5 -39
  279. package/dist/voice/room_io/room_io.cjs.map +1 -1
  280. package/dist/voice/room_io/room_io.d.cts +4 -9
  281. package/dist/voice/room_io/room_io.d.ts +4 -9
  282. package/dist/voice/room_io/room_io.d.ts.map +1 -1
  283. package/dist/voice/room_io/room_io.js +5 -40
  284. package/dist/voice/room_io/room_io.js.map +1 -1
  285. package/dist/voice/turn_config/endpointing.cjs +33 -0
  286. package/dist/voice/turn_config/endpointing.cjs.map +1 -0
  287. package/dist/voice/turn_config/endpointing.d.cts +30 -0
  288. package/dist/voice/turn_config/endpointing.d.ts +30 -0
  289. package/dist/voice/turn_config/endpointing.d.ts.map +1 -0
  290. package/dist/voice/turn_config/endpointing.js +9 -0
  291. package/dist/voice/turn_config/endpointing.js.map +1 -0
  292. package/dist/voice/turn_config/interruption.cjs +37 -0
  293. package/dist/voice/turn_config/interruption.cjs.map +1 -0
  294. package/dist/voice/turn_config/interruption.d.cts +53 -0
  295. package/dist/voice/turn_config/interruption.d.ts +53 -0
  296. package/dist/voice/turn_config/interruption.d.ts.map +1 -0
  297. package/dist/voice/turn_config/interruption.js +13 -0
  298. package/dist/voice/turn_config/interruption.js.map +1 -0
  299. package/dist/voice/turn_config/turn_handling.cjs +35 -0
  300. package/dist/voice/turn_config/turn_handling.cjs.map +1 -0
  301. package/dist/voice/turn_config/turn_handling.d.cts +36 -0
  302. package/dist/voice/turn_config/turn_handling.d.ts +36 -0
  303. package/dist/voice/turn_config/turn_handling.d.ts.map +1 -0
  304. package/dist/voice/turn_config/turn_handling.js +11 -0
  305. package/dist/voice/turn_config/turn_handling.js.map +1 -0
  306. package/dist/voice/turn_config/utils.cjs +97 -0
  307. package/dist/voice/turn_config/utils.cjs.map +1 -0
  308. package/dist/voice/turn_config/utils.d.cts +25 -0
  309. package/dist/voice/turn_config/utils.d.ts +25 -0
  310. package/dist/voice/turn_config/utils.d.ts.map +1 -0
  311. package/dist/voice/turn_config/utils.js +73 -0
  312. package/dist/voice/turn_config/utils.js.map +1 -0
  313. package/dist/voice/turn_config/utils.test.cjs +86 -0
  314. package/dist/voice/turn_config/utils.test.cjs.map +1 -0
  315. package/dist/voice/turn_config/utils.test.js +85 -0
  316. package/dist/voice/turn_config/utils.test.js.map +1 -0
  317. package/dist/voice/wire_format.cjs +798 -0
  318. package/dist/voice/wire_format.cjs.map +1 -0
  319. package/dist/voice/wire_format.d.cts +5503 -0
  320. package/dist/voice/wire_format.d.ts +5503 -0
  321. package/dist/voice/wire_format.d.ts.map +1 -0
  322. package/dist/voice/wire_format.js +728 -0
  323. package/dist/voice/wire_format.js.map +1 -0
  324. package/package.json +2 -1
  325. package/src/constants.ts +13 -0
  326. package/src/inference/interruption/defaults.ts +51 -0
  327. package/src/inference/interruption/errors.ts +25 -0
  328. package/src/inference/interruption/http_transport.ts +187 -0
  329. package/src/inference/interruption/interruption_cache_entry.ts +50 -0
  330. package/src/inference/interruption/interruption_detector.ts +188 -0
  331. package/src/inference/interruption/interruption_stream.ts +467 -0
  332. package/src/inference/interruption/types.ts +84 -0
  333. package/src/inference/interruption/utils.test.ts +132 -0
  334. package/src/inference/interruption/utils.ts +137 -0
  335. package/src/inference/interruption/ws_transport.ts +402 -0
  336. package/src/inference/llm.ts +9 -12
  337. package/src/inference/stt.ts +10 -3
  338. package/src/inference/tts.ts +10 -3
  339. package/src/inference/utils.ts +29 -1
  340. package/src/llm/chat_context.ts +40 -2
  341. package/src/llm/index.ts +1 -0
  342. package/src/llm/llm.ts +16 -0
  343. package/src/llm/realtime.ts +4 -0
  344. package/src/metrics/base.ts +48 -1
  345. package/src/metrics/index.ts +11 -0
  346. package/src/metrics/model_usage.test.ts +545 -0
  347. package/src/metrics/model_usage.ts +262 -0
  348. package/src/metrics/usage_collector.ts +11 -0
  349. package/src/metrics/utils.ts +11 -0
  350. package/src/stream/multi_input_stream.test.ts +6 -1
  351. package/src/stream/stream_channel.ts +34 -2
  352. package/src/stt/stt.ts +38 -0
  353. package/src/telemetry/otel_http_exporter.ts +28 -5
  354. package/src/telemetry/trace_types.ts +11 -8
  355. package/src/telemetry/traces.ts +111 -54
  356. package/src/tts/tts.ts +69 -1
  357. package/src/voice/agent.ts +30 -3
  358. package/src/voice/agent_activity.ts +327 -28
  359. package/src/voice/agent_session.ts +207 -59
  360. package/src/voice/audio_recognition.ts +385 -9
  361. package/src/voice/client_events.ts +838 -0
  362. package/src/voice/events.ts +14 -4
  363. package/src/voice/generation.ts +52 -9
  364. package/src/voice/index.ts +1 -0
  365. package/src/voice/report.test.ts +117 -0
  366. package/src/voice/report.ts +29 -6
  367. package/src/voice/room_io/room_io.ts +7 -61
  368. package/src/voice/turn_config/endpointing.ts +33 -0
  369. package/src/voice/turn_config/interruption.ts +56 -0
  370. package/src/voice/turn_config/turn_handling.ts +45 -0
  371. package/src/voice/turn_config/utils.test.ts +100 -0
  372. package/src/voice/turn_config/utils.ts +103 -0
  373. package/src/voice/wire_format.ts +827 -0
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../../src/metrics/model_usage.ts"],"sourcesContent":["// SPDX-FileCopyrightText: 2024 LiveKit, Inc.\n//\n// SPDX-License-Identifier: Apache-2.0\nimport type {\n AgentMetrics,\n InterruptionMetrics,\n LLMMetrics,\n RealtimeModelMetrics,\n STTMetrics,\n TTSMetrics,\n} from './base.js';\n\nexport type LLMModelUsage = {\n type: 'llm_usage';\n /** The provider name (e.g., 'openai', 'anthropic'). */\n provider: string;\n /** The model name (e.g., 'gpt-4o', 'claude-3-5-sonnet'). */\n model: string;\n /** Total input tokens. */\n inputTokens: number;\n /** Input tokens served from cache. */\n inputCachedTokens: number;\n /** Input audio tokens (for multimodal models). */\n inputAudioTokens: number;\n /** Cached input audio tokens. */\n inputCachedAudioTokens: number;\n /** Input text tokens. */\n inputTextTokens: number;\n /** Cached input text tokens. */\n inputCachedTextTokens: number;\n /** Input image tokens (for multimodal models). */\n inputImageTokens: number;\n /** Cached input image tokens. */\n inputCachedImageTokens: number;\n /** Total output tokens. */\n outputTokens: number;\n /** Output audio tokens (for multimodal models). */\n outputAudioTokens: number;\n /** Output text tokens. */\n outputTextTokens: number;\n /** Total session connection duration in milliseconds (for session-based billing like xAI). */\n sessionDurationMs: number;\n};\n\nexport type TTSModelUsage = {\n type: 'tts_usage';\n /** The provider name (e.g., 'elevenlabs', 'cartesia'). */\n provider: string;\n /** The model name (e.g., 'eleven_turbo_v2', 'sonic'). */\n model: string;\n /** Input text tokens (for token-based TTS billing, e.g., OpenAI TTS). */\n inputTokens: number;\n /** Output audio tokens (for token-based TTS billing, e.g., OpenAI TTS). */\n outputTokens: number;\n /** Number of characters synthesized (for character-based TTS billing). 
*/\n charactersCount: number;\n /**\n * Duration of generated audio in milliseconds.\n */\n audioDurationMs: number;\n};\n\nexport type STTModelUsage = {\n type: 'stt_usage';\n /** The provider name (e.g., 'deepgram', 'assemblyai'). */\n provider: string;\n /** The model name (e.g., 'nova-2', 'best'). */\n model: string;\n /** Input audio tokens (for token-based STT billing). */\n inputTokens: number;\n /** Output text tokens (for token-based STT billing). */\n outputTokens: number;\n /** Duration of processed audio in milliseconds. */\n audioDurationMs: number;\n};\n\nexport type InterruptionModelUsage = {\n type: 'interruption_usage';\n /** The provider name (e.g., 'livekit'). */\n provider: string;\n /** The model name (e.g., 'adaptive interruption'). */\n model: string;\n /** Total number of requests sent. */\n totalRequests: number;\n};\n\nexport type ModelUsage = LLMModelUsage | TTSModelUsage | STTModelUsage | InterruptionModelUsage;\n\nexport function filterZeroValues<T extends ModelUsage>(usage: T): Partial<T> {\n const result: Partial<T> = {} as Partial<T>;\n for (const [key, value] of Object.entries(usage)) {\n if (value !== 0 && value !== 0.0) {\n (result as Record<string, unknown>)[key] = value;\n }\n }\n return result;\n}\n\nexport class ModelUsageCollector {\n private llmUsage: Map<string, LLMModelUsage> = new Map();\n private ttsUsage: Map<string, TTSModelUsage> = new Map();\n private sttUsage: Map<string, STTModelUsage> = new Map();\n\n private interruptionUsage: Map<string, InterruptionModelUsage> = new Map();\n\n /** Extract provider and model from metrics metadata. 
*/\n private extractProviderModel(\n metrics: LLMMetrics | STTMetrics | TTSMetrics | RealtimeModelMetrics | InterruptionMetrics,\n ): [string, string] {\n let provider = '';\n let model = '';\n if (metrics.metadata) {\n provider = metrics.metadata.modelProvider || '';\n model = metrics.metadata.modelName || '';\n }\n return [provider, model];\n }\n\n /** Get or create an LLMModelUsage for the given provider/model combination. */\n private getLLMUsage(provider: string, model: string): LLMModelUsage {\n const key = `${provider}:${model}`;\n let usage = this.llmUsage.get(key);\n if (!usage) {\n usage = {\n type: 'llm_usage',\n provider,\n model,\n inputTokens: 0,\n inputCachedTokens: 0,\n inputAudioTokens: 0,\n inputCachedAudioTokens: 0,\n inputTextTokens: 0,\n inputCachedTextTokens: 0,\n inputImageTokens: 0,\n inputCachedImageTokens: 0,\n outputTokens: 0,\n outputAudioTokens: 0,\n outputTextTokens: 0,\n sessionDurationMs: 0,\n };\n this.llmUsage.set(key, usage);\n }\n return usage;\n }\n\n /** Get or create a TTSModelUsage for the given provider/model combination. */\n private getTTSUsage(provider: string, model: string): TTSModelUsage {\n const key = `${provider}:${model}`;\n let usage = this.ttsUsage.get(key);\n if (!usage) {\n usage = {\n type: 'tts_usage',\n provider,\n model,\n inputTokens: 0,\n outputTokens: 0,\n charactersCount: 0,\n audioDurationMs: 0,\n };\n this.ttsUsage.set(key, usage);\n }\n return usage;\n }\n\n /** Get or create an STTModelUsage for the given provider/model combination. 
*/\n private getSTTUsage(provider: string, model: string): STTModelUsage {\n const key = `${provider}:${model}`;\n let usage = this.sttUsage.get(key);\n if (!usage) {\n usage = {\n type: 'stt_usage',\n provider,\n model,\n inputTokens: 0,\n outputTokens: 0,\n audioDurationMs: 0,\n };\n this.sttUsage.set(key, usage);\n }\n return usage;\n }\n\n private getInterruptionUsage(provider: string, model: string): InterruptionModelUsage {\n const key = `${provider}:${model}`;\n let usage = this.interruptionUsage.get(key);\n if (!usage) {\n usage = {\n type: 'interruption_usage',\n provider,\n model,\n totalRequests: 0,\n };\n this.interruptionUsage.set(key, usage);\n }\n return usage;\n }\n\n /** Collect metrics and aggregate usage by model/provider. */\n collect(metrics: AgentMetrics): void {\n if (metrics.type === 'llm_metrics') {\n const [provider, model] = this.extractProviderModel(metrics);\n const usage = this.getLLMUsage(provider, model);\n usage.inputTokens += metrics.promptTokens;\n usage.inputCachedTokens += metrics.promptCachedTokens;\n usage.outputTokens += metrics.completionTokens;\n } else if (metrics.type === 'realtime_model_metrics') {\n const [provider, model] = this.extractProviderModel(metrics);\n const usage = this.getLLMUsage(provider, model);\n usage.inputTokens += metrics.inputTokens;\n usage.inputCachedTokens += metrics.inputTokenDetails.cachedTokens;\n\n usage.inputTextTokens += metrics.inputTokenDetails.textTokens;\n usage.inputCachedTextTokens += metrics.inputTokenDetails.cachedTokensDetails?.textTokens ?? 0;\n usage.inputImageTokens += metrics.inputTokenDetails.imageTokens;\n usage.inputCachedImageTokens +=\n metrics.inputTokenDetails.cachedTokensDetails?.imageTokens ?? 0;\n usage.inputAudioTokens += metrics.inputTokenDetails.audioTokens;\n usage.inputCachedAudioTokens +=\n metrics.inputTokenDetails.cachedTokensDetails?.audioTokens ?? 
0;\n\n usage.outputTextTokens += metrics.outputTokenDetails.textTokens;\n usage.outputAudioTokens += metrics.outputTokenDetails.audioTokens;\n usage.outputTokens += metrics.outputTokens;\n usage.sessionDurationMs += metrics.sessionDurationMs ?? 0;\n } else if (metrics.type === 'tts_metrics') {\n const [provider, model] = this.extractProviderModel(metrics);\n const ttsUsage = this.getTTSUsage(provider, model);\n ttsUsage.inputTokens += metrics.inputTokens ?? 0;\n ttsUsage.outputTokens += metrics.outputTokens ?? 0;\n ttsUsage.charactersCount += metrics.charactersCount;\n ttsUsage.audioDurationMs += metrics.audioDurationMs;\n } else if (metrics.type === 'stt_metrics') {\n const [provider, model] = this.extractProviderModel(metrics);\n const sttUsage = this.getSTTUsage(provider, model);\n sttUsage.inputTokens += metrics.inputTokens ?? 0;\n sttUsage.outputTokens += metrics.outputTokens ?? 0;\n sttUsage.audioDurationMs += metrics.audioDurationMs;\n } else if (metrics.type === 'interruption_metrics') {\n const [provider, model] = this.extractProviderModel(metrics);\n const usage = this.getInterruptionUsage(provider, model);\n usage.totalRequests += metrics.numRequests;\n }\n // VAD and EOU metrics are not aggregated for usage tracking.\n }\n\n flatten(): ModelUsage[] {\n const result: ModelUsage[] = [];\n for (const u of this.llmUsage.values()) {\n result.push({ ...u });\n }\n for (const u of this.ttsUsage.values()) {\n result.push({ ...u });\n }\n for (const u of this.sttUsage.values()) {\n result.push({ ...u });\n }\n for (const u of this.interruptionUsage.values()) {\n result.push({ ...u });\n }\n return result;\n 
}\n}\n"],"mappings":"AAwFO,SAAS,iBAAuC,OAAsB;AAC3E,QAAM,SAAqB,CAAC;AAC5B,aAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,KAAK,GAAG;AAChD,QAAI,UAAU,KAAK,UAAU,GAAK;AAChC,MAAC,OAAmC,GAAG,IAAI;AAAA,IAC7C;AAAA,EACF;AACA,SAAO;AACT;AAEO,MAAM,oBAAoB;AAAA,EACvB,WAAuC,oBAAI,IAAI;AAAA,EAC/C,WAAuC,oBAAI,IAAI;AAAA,EAC/C,WAAuC,oBAAI,IAAI;AAAA,EAE/C,oBAAyD,oBAAI,IAAI;AAAA;AAAA,EAGjE,qBACN,SACkB;AAClB,QAAI,WAAW;AACf,QAAI,QAAQ;AACZ,QAAI,QAAQ,UAAU;AACpB,iBAAW,QAAQ,SAAS,iBAAiB;AAC7C,cAAQ,QAAQ,SAAS,aAAa;AAAA,IACxC;AACA,WAAO,CAAC,UAAU,KAAK;AAAA,EACzB;AAAA;AAAA,EAGQ,YAAY,UAAkB,OAA8B;AAClE,UAAM,MAAM,GAAG,QAAQ,IAAI,KAAK;AAChC,QAAI,QAAQ,KAAK,SAAS,IAAI,GAAG;AACjC,QAAI,CAAC,OAAO;AACV,cAAQ;AAAA,QACN,MAAM;AAAA,QACN;AAAA,QACA;AAAA,QACA,aAAa;AAAA,QACb,mBAAmB;AAAA,QACnB,kBAAkB;AAAA,QAClB,wBAAwB;AAAA,QACxB,iBAAiB;AAAA,QACjB,uBAAuB;AAAA,QACvB,kBAAkB;AAAA,QAClB,wBAAwB;AAAA,QACxB,cAAc;AAAA,QACd,mBAAmB;AAAA,QACnB,kBAAkB;AAAA,QAClB,mBAAmB;AAAA,MACrB;AACA,WAAK,SAAS,IAAI,KAAK,KAAK;AAAA,IAC9B;AACA,WAAO;AAAA,EACT;AAAA;AAAA,EAGQ,YAAY,UAAkB,OAA8B;AAClE,UAAM,MAAM,GAAG,QAAQ,IAAI,KAAK;AAChC,QAAI,QAAQ,KAAK,SAAS,IAAI,GAAG;AACjC,QAAI,CAAC,OAAO;AACV,cAAQ;AAAA,QACN,MAAM;AAAA,QACN;AAAA,QACA;AAAA,QACA,aAAa;AAAA,QACb,cAAc;AAAA,QACd,iBAAiB;AAAA,QACjB,iBAAiB;AAAA,MACnB;AACA,WAAK,SAAS,IAAI,KAAK,KAAK;AAAA,IAC9B;AACA,WAAO;AAAA,EACT;AAAA;AAAA,EAGQ,YAAY,UAAkB,OAA8B;AAClE,UAAM,MAAM,GAAG,QAAQ,IAAI,KAAK;AAChC,QAAI,QAAQ,KAAK,SAAS,IAAI,GAAG;AACjC,QAAI,CAAC,OAAO;AACV,cAAQ;AAAA,QACN,MAAM;AAAA,QACN;AAAA,QACA;AAAA,QACA,aAAa;AAAA,QACb,cAAc;AAAA,QACd,iBAAiB;AAAA,MACnB;AACA,WAAK,SAAS,IAAI,KAAK,KAAK;AAAA,IAC9B;AACA,WAAO;AAAA,EACT;AAAA,EAEQ,qBAAqB,UAAkB,OAAuC;AACpF,UAAM,MAAM,GAAG,QAAQ,IAAI,KAAK;AAChC,QAAI,QAAQ,KAAK,kBAAkB,IAAI,GAAG;AAC1C,QAAI,CAAC,OAAO;AACV,cAAQ;AAAA,QACN,MAAM;AAAA,QACN;AAAA,QACA;AAAA,QACA,eAAe;AAAA,MACjB;AACA,WAAK,kBAAkB,IAAI,KAAK,KAAK;AAAA,IACvC;AACA,WAAO;AAAA,EACT;AAAA;AAAA,EAGA,QAAQ,SAA6B;AAtMvC;AAuMI,QAAI,QAAQ,SAAS,eAAe;AAClC,YAAM,CAAC,UAAU,KAAK,IAAI,KAAK,qBAAqB,OAAO;AAC3D,YAAM,QAAQ,KAAK,YAAY,UAAU,KAAK;AAC9C,YAAM,eAAe,QAAQ;AAC7B,YA
AM,qBAAqB,QAAQ;AACnC,YAAM,gBAAgB,QAAQ;AAAA,IAChC,WAAW,QAAQ,SAAS,0BAA0B;AACpD,YAAM,CAAC,UAAU,KAAK,IAAI,KAAK,qBAAqB,OAAO;AAC3D,YAAM,QAAQ,KAAK,YAAY,UAAU,KAAK;AAC9C,YAAM,eAAe,QAAQ;AAC7B,YAAM,qBAAqB,QAAQ,kBAAkB;AAErD,YAAM,mBAAmB,QAAQ,kBAAkB;AACnD,YAAM,2BAAyB,aAAQ,kBAAkB,wBAA1B,mBAA+C,eAAc;AAC5F,YAAM,oBAAoB,QAAQ,kBAAkB;AACpD,YAAM,4BACJ,aAAQ,kBAAkB,wBAA1B,mBAA+C,gBAAe;AAChE,YAAM,oBAAoB,QAAQ,kBAAkB;AACpD,YAAM,4BACJ,aAAQ,kBAAkB,wBAA1B,mBAA+C,gBAAe;AAEhE,YAAM,oBAAoB,QAAQ,mBAAmB;AACrD,YAAM,qBAAqB,QAAQ,mBAAmB;AACtD,YAAM,gBAAgB,QAAQ;AAC9B,YAAM,qBAAqB,QAAQ,qBAAqB;AAAA,IAC1D,WAAW,QAAQ,SAAS,eAAe;AACzC,YAAM,CAAC,UAAU,KAAK,IAAI,KAAK,qBAAqB,OAAO;AAC3D,YAAM,WAAW,KAAK,YAAY,UAAU,KAAK;AACjD,eAAS,eAAe,QAAQ,eAAe;AAC/C,eAAS,gBAAgB,QAAQ,gBAAgB;AACjD,eAAS,mBAAmB,QAAQ;AACpC,eAAS,mBAAmB,QAAQ;AAAA,IACtC,WAAW,QAAQ,SAAS,eAAe;AACzC,YAAM,CAAC,UAAU,KAAK,IAAI,KAAK,qBAAqB,OAAO;AAC3D,YAAM,WAAW,KAAK,YAAY,UAAU,KAAK;AACjD,eAAS,eAAe,QAAQ,eAAe;AAC/C,eAAS,gBAAgB,QAAQ,gBAAgB;AACjD,eAAS,mBAAmB,QAAQ;AAAA,IACtC,WAAW,QAAQ,SAAS,wBAAwB;AAClD,YAAM,CAAC,UAAU,KAAK,IAAI,KAAK,qBAAqB,OAAO;AAC3D,YAAM,QAAQ,KAAK,qBAAqB,UAAU,KAAK;AACvD,YAAM,iBAAiB,QAAQ;AAAA,IACjC;AAAA,EAEF;AAAA,EAEA,UAAwB;AACtB,UAAM,SAAuB,CAAC;AAC9B,eAAW,KAAK,KAAK,SAAS,OAAO,GAAG;AACtC,aAAO,KAAK,EAAE,GAAG,EAAE,CAAC;AAAA,IACtB;AACA,eAAW,KAAK,KAAK,SAAS,OAAO,GAAG;AACtC,aAAO,KAAK,EAAE,GAAG,EAAE,CAAC;AAAA,IACtB;AACA,eAAW,KAAK,KAAK,SAAS,OAAO,GAAG;AACtC,aAAO,KAAK,EAAE,GAAG,EAAE,CAAC;AAAA,IACtB;AACA,eAAW,KAAK,KAAK,kBAAkB,OAAO,GAAG;AAC/C,aAAO,KAAK,EAAE,GAAG,EAAE,CAAC;AAAA,IACtB;AACA,WAAO;AAAA,EACT;AACF;","names":[]}
@@ -0,0 +1,474 @@
1
+ "use strict";
2
+ var import_vitest = require("vitest");
3
+ var import_model_usage = require("./model_usage.cjs");
4
+ (0, import_vitest.describe)("model_usage", () => {
5
+ (0, import_vitest.describe)("filterZeroValues", () => {
6
+ (0, import_vitest.it)("should filter out zero values from LLMModelUsage", () => {
7
+ const usage = {
8
+ type: "llm_usage",
9
+ provider: "openai",
10
+ model: "gpt-4o",
11
+ inputTokens: 100,
12
+ inputCachedTokens: 0,
13
+ inputAudioTokens: 0,
14
+ inputCachedAudioTokens: 0,
15
+ inputTextTokens: 0,
16
+ inputCachedTextTokens: 0,
17
+ inputImageTokens: 0,
18
+ inputCachedImageTokens: 0,
19
+ outputTokens: 50,
20
+ outputAudioTokens: 0,
21
+ outputTextTokens: 0,
22
+ sessionDurationMs: 0
23
+ };
24
+ const filtered = (0, import_model_usage.filterZeroValues)(usage);
25
+ (0, import_vitest.expect)(filtered.type).toBe("llm_usage");
26
+ (0, import_vitest.expect)(filtered.provider).toBe("openai");
27
+ (0, import_vitest.expect)(filtered.model).toBe("gpt-4o");
28
+ (0, import_vitest.expect)(filtered.inputTokens).toBe(100);
29
+ (0, import_vitest.expect)(filtered.outputTokens).toBe(50);
30
+ (0, import_vitest.expect)(filtered.inputCachedTokens).toBeUndefined();
31
+ (0, import_vitest.expect)(filtered.inputAudioTokens).toBeUndefined();
32
+ (0, import_vitest.expect)(filtered.sessionDurationMs).toBeUndefined();
33
+ });
34
+ (0, import_vitest.it)("should filter out zero values from TTSModelUsage", () => {
35
+ const usage = {
36
+ type: "tts_usage",
37
+ provider: "elevenlabs",
38
+ model: "eleven_turbo_v2",
39
+ inputTokens: 0,
40
+ outputTokens: 0,
41
+ charactersCount: 500,
42
+ audioDurationMs: 3e3
43
+ };
44
+ const filtered = (0, import_model_usage.filterZeroValues)(usage);
45
+ (0, import_vitest.expect)(filtered.type).toBe("tts_usage");
46
+ (0, import_vitest.expect)(filtered.provider).toBe("elevenlabs");
47
+ (0, import_vitest.expect)(filtered.charactersCount).toBe(500);
48
+ (0, import_vitest.expect)(filtered.audioDurationMs).toBe(3e3);
49
+ (0, import_vitest.expect)(filtered.inputTokens).toBeUndefined();
50
+ (0, import_vitest.expect)(filtered.outputTokens).toBeUndefined();
51
+ });
52
+ (0, import_vitest.it)("should keep all values when none are zero", () => {
53
+ const usage = {
54
+ type: "stt_usage",
55
+ provider: "deepgram",
56
+ model: "nova-2",
57
+ inputTokens: 10,
58
+ outputTokens: 20,
59
+ audioDurationMs: 5e3
60
+ };
61
+ const filtered = (0, import_model_usage.filterZeroValues)(usage);
62
+ (0, import_vitest.expect)(Object.keys(filtered)).toHaveLength(6);
63
+ (0, import_vitest.expect)(filtered).toEqual(usage);
64
+ });
65
+ });
66
+ (0, import_vitest.describe)("ModelUsageCollector", () => {
67
+ let collector;
68
+ (0, import_vitest.beforeEach)(() => {
69
+ collector = new import_model_usage.ModelUsageCollector();
70
+ });
71
+ (0, import_vitest.describe)("collect LLM metrics", () => {
72
+ (0, import_vitest.it)("should aggregate LLM metrics by provider and model", () => {
73
+ const metrics1 = {
74
+ type: "llm_metrics",
75
+ label: "test",
76
+ requestId: "req1",
77
+ timestamp: Date.now(),
78
+ durationMs: 100,
79
+ ttftMs: 50,
80
+ cancelled: false,
81
+ completionTokens: 100,
82
+ promptTokens: 200,
83
+ promptCachedTokens: 50,
84
+ totalTokens: 300,
85
+ tokensPerSecond: 10,
86
+ metadata: {
87
+ modelProvider: "openai",
88
+ modelName: "gpt-4o"
89
+ }
90
+ };
91
+ const metrics2 = {
92
+ type: "llm_metrics",
93
+ label: "test",
94
+ requestId: "req2",
95
+ timestamp: Date.now(),
96
+ durationMs: 150,
97
+ ttftMs: 60,
98
+ cancelled: false,
99
+ completionTokens: 150,
100
+ promptTokens: 300,
101
+ promptCachedTokens: 75,
102
+ totalTokens: 450,
103
+ tokensPerSecond: 12,
104
+ metadata: {
105
+ modelProvider: "openai",
106
+ modelName: "gpt-4o"
107
+ }
108
+ };
109
+ collector.collect(metrics1);
110
+ collector.collect(metrics2);
111
+ const usage = collector.flatten();
112
+ (0, import_vitest.expect)(usage).toHaveLength(1);
113
+ const llmUsage = usage[0];
114
+ (0, import_vitest.expect)(llmUsage.type).toBe("llm_usage");
115
+ (0, import_vitest.expect)(llmUsage.provider).toBe("openai");
116
+ (0, import_vitest.expect)(llmUsage.model).toBe("gpt-4o");
117
+ (0, import_vitest.expect)(llmUsage.inputTokens).toBe(500);
118
+ (0, import_vitest.expect)(llmUsage.inputCachedTokens).toBe(125);
119
+ (0, import_vitest.expect)(llmUsage.outputTokens).toBe(250);
120
+ });
121
+ (0, import_vitest.it)("should separate metrics by different providers", () => {
122
+ const openaiMetrics = {
123
+ type: "llm_metrics",
124
+ label: "test",
125
+ requestId: "req1",
126
+ timestamp: Date.now(),
127
+ durationMs: 100,
128
+ ttftMs: 50,
129
+ cancelled: false,
130
+ completionTokens: 100,
131
+ promptTokens: 200,
132
+ promptCachedTokens: 0,
133
+ totalTokens: 300,
134
+ tokensPerSecond: 10,
135
+ metadata: {
136
+ modelProvider: "openai",
137
+ modelName: "gpt-4o"
138
+ }
139
+ };
140
+ const anthropicMetrics = {
141
+ type: "llm_metrics",
142
+ label: "test",
143
+ requestId: "req2",
144
+ timestamp: Date.now(),
145
+ durationMs: 120,
146
+ ttftMs: 55,
147
+ cancelled: false,
148
+ completionTokens: 80,
149
+ promptTokens: 150,
150
+ promptCachedTokens: 0,
151
+ totalTokens: 230,
152
+ tokensPerSecond: 8,
153
+ metadata: {
154
+ modelProvider: "anthropic",
155
+ modelName: "claude-3-5-sonnet"
156
+ }
157
+ };
158
+ collector.collect(openaiMetrics);
159
+ collector.collect(anthropicMetrics);
160
+ const usage = collector.flatten();
161
+ (0, import_vitest.expect)(usage).toHaveLength(2);
162
+ const openaiUsage = usage.find(
163
+ (u) => u.type === "llm_usage" && u.provider === "openai"
164
+ );
165
+ const anthropicUsage = usage.find(
166
+ (u) => u.type === "llm_usage" && u.provider === "anthropic"
167
+ );
168
+ (0, import_vitest.expect)(openaiUsage.inputTokens).toBe(200);
169
+ (0, import_vitest.expect)(openaiUsage.outputTokens).toBe(100);
170
+ (0, import_vitest.expect)(anthropicUsage.inputTokens).toBe(150);
171
+ (0, import_vitest.expect)(anthropicUsage.outputTokens).toBe(80);
172
+ });
173
+ });
174
+ (0, import_vitest.describe)("collect TTS metrics", () => {
175
+ (0, import_vitest.it)("should aggregate TTS metrics by provider and model", () => {
176
+ const metrics1 = {
177
+ type: "tts_metrics",
178
+ label: "test",
179
+ requestId: "req1",
180
+ timestamp: Date.now(),
181
+ ttfbMs: 100,
182
+ durationMs: 500,
183
+ audioDurationMs: 3e3,
184
+ cancelled: false,
185
+ charactersCount: 100,
186
+ inputTokens: 10,
187
+ outputTokens: 20,
188
+ streamed: true,
189
+ metadata: {
190
+ modelProvider: "elevenlabs",
191
+ modelName: "eleven_turbo_v2"
192
+ }
193
+ };
194
+ const metrics2 = {
195
+ type: "tts_metrics",
196
+ label: "test",
197
+ requestId: "req2",
198
+ timestamp: Date.now(),
199
+ ttfbMs: 120,
200
+ durationMs: 600,
201
+ audioDurationMs: 4e3,
202
+ cancelled: false,
203
+ charactersCount: 200,
204
+ inputTokens: 15,
205
+ outputTokens: 25,
206
+ streamed: true,
207
+ metadata: {
208
+ modelProvider: "elevenlabs",
209
+ modelName: "eleven_turbo_v2"
210
+ }
211
+ };
212
+ collector.collect(metrics1);
213
+ collector.collect(metrics2);
214
+ const usage = collector.flatten();
215
+ (0, import_vitest.expect)(usage).toHaveLength(1);
216
+ const ttsUsage = usage[0];
217
+ (0, import_vitest.expect)(ttsUsage.type).toBe("tts_usage");
218
+ (0, import_vitest.expect)(ttsUsage.provider).toBe("elevenlabs");
219
+ (0, import_vitest.expect)(ttsUsage.model).toBe("eleven_turbo_v2");
220
+ (0, import_vitest.expect)(ttsUsage.charactersCount).toBe(300);
221
+ (0, import_vitest.expect)(ttsUsage.audioDurationMs).toBe(7e3);
222
+ (0, import_vitest.expect)(ttsUsage.inputTokens).toBe(25);
223
+ (0, import_vitest.expect)(ttsUsage.outputTokens).toBe(45);
224
+ });
225
+ });
226
+ (0, import_vitest.describe)("collect STT metrics", () => {
227
+ (0, import_vitest.it)("should aggregate STT metrics by provider and model", () => {
228
+ const metrics1 = {
229
+ type: "stt_metrics",
230
+ label: "test",
231
+ requestId: "req1",
232
+ timestamp: Date.now(),
233
+ durationMs: 0,
234
+ audioDurationMs: 5e3,
235
+ inputTokens: 50,
236
+ outputTokens: 100,
237
+ streamed: true,
238
+ metadata: {
239
+ modelProvider: "deepgram",
240
+ modelName: "nova-2"
241
+ }
242
+ };
243
+ const metrics2 = {
244
+ type: "stt_metrics",
245
+ label: "test",
246
+ requestId: "req2",
247
+ timestamp: Date.now(),
248
+ durationMs: 0,
249
+ audioDurationMs: 3e3,
250
+ inputTokens: 30,
251
+ outputTokens: 60,
252
+ streamed: true,
253
+ metadata: {
254
+ modelProvider: "deepgram",
255
+ modelName: "nova-2"
256
+ }
257
+ };
258
+ collector.collect(metrics1);
259
+ collector.collect(metrics2);
260
+ const usage = collector.flatten();
261
+ (0, import_vitest.expect)(usage).toHaveLength(1);
262
+ const sttUsage = usage[0];
263
+ (0, import_vitest.expect)(sttUsage.type).toBe("stt_usage");
264
+ (0, import_vitest.expect)(sttUsage.provider).toBe("deepgram");
265
+ (0, import_vitest.expect)(sttUsage.model).toBe("nova-2");
266
+ (0, import_vitest.expect)(sttUsage.audioDurationMs).toBe(8e3);
267
+ (0, import_vitest.expect)(sttUsage.inputTokens).toBe(80);
268
+ (0, import_vitest.expect)(sttUsage.outputTokens).toBe(160);
269
+ });
270
+ });
271
+ (0, import_vitest.describe)("collect realtime model metrics", () => {
272
+ (0, import_vitest.it)("should aggregate realtime model metrics with detailed token breakdown", () => {
273
+ const metrics = {
274
+ type: "realtime_model_metrics",
275
+ label: "test",
276
+ requestId: "req1",
277
+ timestamp: Date.now(),
278
+ durationMs: 1e3,
279
+ ttftMs: 100,
280
+ cancelled: false,
281
+ inputTokens: 500,
282
+ outputTokens: 300,
283
+ totalTokens: 800,
284
+ tokensPerSecond: 10,
285
+ sessionDurationMs: 5e3,
286
+ inputTokenDetails: {
287
+ audioTokens: 200,
288
+ textTokens: 250,
289
+ imageTokens: 50,
290
+ cachedTokens: 100,
291
+ cachedTokensDetails: {
292
+ audioTokens: 30,
293
+ textTokens: 50,
294
+ imageTokens: 20
295
+ }
296
+ },
297
+ outputTokenDetails: {
298
+ textTokens: 200,
299
+ audioTokens: 100,
300
+ imageTokens: 0
301
+ },
302
+ metadata: {
303
+ modelProvider: "openai",
304
+ modelName: "gpt-4o-realtime"
305
+ }
306
+ };
307
+ collector.collect(metrics);
308
+ const usage = collector.flatten();
309
+ (0, import_vitest.expect)(usage).toHaveLength(1);
310
+ const llmUsage = usage[0];
311
+ (0, import_vitest.expect)(llmUsage.type).toBe("llm_usage");
312
+ (0, import_vitest.expect)(llmUsage.provider).toBe("openai");
313
+ (0, import_vitest.expect)(llmUsage.model).toBe("gpt-4o-realtime");
314
+ (0, import_vitest.expect)(llmUsage.inputTokens).toBe(500);
315
+ (0, import_vitest.expect)(llmUsage.inputCachedTokens).toBe(100);
316
+ (0, import_vitest.expect)(llmUsage.inputAudioTokens).toBe(200);
317
+ (0, import_vitest.expect)(llmUsage.inputCachedAudioTokens).toBe(30);
318
+ (0, import_vitest.expect)(llmUsage.inputTextTokens).toBe(250);
319
+ (0, import_vitest.expect)(llmUsage.inputCachedTextTokens).toBe(50);
320
+ (0, import_vitest.expect)(llmUsage.inputImageTokens).toBe(50);
321
+ (0, import_vitest.expect)(llmUsage.inputCachedImageTokens).toBe(20);
322
+ (0, import_vitest.expect)(llmUsage.outputTokens).toBe(300);
323
+ (0, import_vitest.expect)(llmUsage.outputTextTokens).toBe(200);
324
+ (0, import_vitest.expect)(llmUsage.outputAudioTokens).toBe(100);
325
+ (0, import_vitest.expect)(llmUsage.sessionDurationMs).toBe(5e3);
326
+ });
327
+ });
328
+ (0, import_vitest.describe)("mixed metrics collection", () => {
329
+ (0, import_vitest.it)("should collect and separate LLM, TTS, and STT metrics", () => {
330
+ const llmMetrics = {
331
+ type: "llm_metrics",
332
+ label: "test",
333
+ requestId: "req1",
334
+ timestamp: Date.now(),
335
+ durationMs: 100,
336
+ ttftMs: 50,
337
+ cancelled: false,
338
+ completionTokens: 100,
339
+ promptTokens: 200,
340
+ promptCachedTokens: 0,
341
+ totalTokens: 300,
342
+ tokensPerSecond: 10,
343
+ metadata: {
344
+ modelProvider: "openai",
345
+ modelName: "gpt-4o"
346
+ }
347
+ };
348
+ const ttsMetrics = {
349
+ type: "tts_metrics",
350
+ label: "test",
351
+ requestId: "req2",
352
+ timestamp: Date.now(),
353
+ ttfbMs: 100,
354
+ durationMs: 500,
355
+ audioDurationMs: 3e3,
356
+ cancelled: false,
357
+ charactersCount: 100,
358
+ streamed: true,
359
+ metadata: {
360
+ modelProvider: "elevenlabs",
361
+ modelName: "eleven_turbo_v2"
362
+ }
363
+ };
364
+ const sttMetrics = {
365
+ type: "stt_metrics",
366
+ label: "test",
367
+ requestId: "req3",
368
+ timestamp: Date.now(),
369
+ durationMs: 0,
370
+ audioDurationMs: 5e3,
371
+ streamed: true,
372
+ metadata: {
373
+ modelProvider: "deepgram",
374
+ modelName: "nova-2"
375
+ }
376
+ };
377
+ collector.collect(llmMetrics);
378
+ collector.collect(ttsMetrics);
379
+ collector.collect(sttMetrics);
380
+ const usage = collector.flatten();
381
+ (0, import_vitest.expect)(usage).toHaveLength(3);
382
+ const llmUsage = usage.find((u) => u.type === "llm_usage");
383
+ const ttsUsage = usage.find((u) => u.type === "tts_usage");
384
+ const sttUsage = usage.find((u) => u.type === "stt_usage");
385
+ (0, import_vitest.expect)(llmUsage).toBeDefined();
386
+ (0, import_vitest.expect)(ttsUsage).toBeDefined();
387
+ (0, import_vitest.expect)(sttUsage).toBeDefined();
388
+ });
389
+ });
390
+ (0, import_vitest.describe)("flatten returns copies", () => {
391
+ (0, import_vitest.it)("should return deep copies of usage objects", () => {
392
+ const metrics = {
393
+ type: "llm_metrics",
394
+ label: "test",
395
+ requestId: "req1",
396
+ timestamp: Date.now(),
397
+ durationMs: 100,
398
+ ttftMs: 50,
399
+ cancelled: false,
400
+ completionTokens: 100,
401
+ promptTokens: 200,
402
+ promptCachedTokens: 0,
403
+ totalTokens: 300,
404
+ tokensPerSecond: 10,
405
+ metadata: {
406
+ modelProvider: "openai",
407
+ modelName: "gpt-4o"
408
+ }
409
+ };
410
+ collector.collect(metrics);
411
+ const usage1 = collector.flatten();
412
+ const usage2 = collector.flatten();
413
+ (0, import_vitest.expect)(usage1[0]).toEqual(usage2[0]);
414
+ (0, import_vitest.expect)(usage1[0]).not.toBe(usage2[0]);
415
+ usage1[0].inputTokens = 9999;
416
+ (0, import_vitest.expect)(usage2[0].inputTokens).toBe(200);
417
+ });
418
+ });
419
+ (0, import_vitest.describe)("handles missing metadata", () => {
420
+ (0, import_vitest.it)("should use empty strings when metadata is missing", () => {
421
+ const metrics = {
422
+ type: "llm_metrics",
423
+ label: "test",
424
+ requestId: "req1",
425
+ timestamp: Date.now(),
426
+ durationMs: 100,
427
+ ttftMs: 50,
428
+ cancelled: false,
429
+ completionTokens: 100,
430
+ promptTokens: 200,
431
+ promptCachedTokens: 0,
432
+ totalTokens: 300,
433
+ tokensPerSecond: 10
434
+ // No metadata
435
+ };
436
+ collector.collect(metrics);
437
+ const usage = collector.flatten();
438
+ (0, import_vitest.expect)(usage).toHaveLength(1);
439
+ const llmUsage = usage[0];
440
+ (0, import_vitest.expect)(llmUsage.provider).toBe("");
441
+ (0, import_vitest.expect)(llmUsage.model).toBe("");
442
+ });
443
+ });
444
+ (0, import_vitest.describe)("ignores VAD and EOU metrics", () => {
445
+ (0, import_vitest.it)("should not collect VAD metrics", () => {
446
+ const vadMetrics = {
447
+ type: "vad_metrics",
448
+ label: "test",
449
+ timestamp: Date.now(),
450
+ idleTimeMs: 100,
451
+ inferenceDurationTotalMs: 50,
452
+ inferenceCount: 10
453
+ };
454
+ collector.collect(vadMetrics);
455
+ const usage = collector.flatten();
456
+ (0, import_vitest.expect)(usage).toHaveLength(0);
457
+ });
458
+ (0, import_vitest.it)("should not collect EOU metrics", () => {
459
+ const eouMetrics = {
460
+ type: "eou_metrics",
461
+ timestamp: Date.now(),
462
+ endOfUtteranceDelayMs: 100,
463
+ transcriptionDelayMs: 50,
464
+ onUserTurnCompletedDelayMs: 30,
465
+ lastSpeakingTimeMs: Date.now()
466
+ };
467
+ collector.collect(eouMetrics);
468
+ const usage = collector.flatten();
469
+ (0, import_vitest.expect)(usage).toHaveLength(0);
470
+ });
471
+ });
472
+ });
473
+ });
474
+ //# sourceMappingURL=model_usage.test.cjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../../src/metrics/model_usage.test.ts"],"sourcesContent":["// SPDX-FileCopyrightText: 2024 LiveKit, Inc.\n//\n// SPDX-License-Identifier: Apache-2.0\nimport { beforeEach, describe, expect, it } from 'vitest';\nimport type { LLMMetrics, RealtimeModelMetrics, STTMetrics, TTSMetrics } from './base.js';\nimport {\n type LLMModelUsage,\n ModelUsageCollector,\n type STTModelUsage,\n type TTSModelUsage,\n filterZeroValues,\n} from './model_usage.js';\n\ndescribe('model_usage', () => {\n describe('filterZeroValues', () => {\n it('should filter out zero values from LLMModelUsage', () => {\n const usage: LLMModelUsage = {\n type: 'llm_usage',\n provider: 'openai',\n model: 'gpt-4o',\n inputTokens: 100,\n inputCachedTokens: 0,\n inputAudioTokens: 0,\n inputCachedAudioTokens: 0,\n inputTextTokens: 0,\n inputCachedTextTokens: 0,\n inputImageTokens: 0,\n inputCachedImageTokens: 0,\n outputTokens: 50,\n outputAudioTokens: 0,\n outputTextTokens: 0,\n sessionDurationMs: 0,\n };\n\n const filtered = filterZeroValues(usage);\n\n expect(filtered.type).toBe('llm_usage');\n expect(filtered.provider).toBe('openai');\n expect(filtered.model).toBe('gpt-4o');\n expect(filtered.inputTokens).toBe(100);\n expect(filtered.outputTokens).toBe(50);\n // Zero values should be filtered out\n expect(filtered.inputCachedTokens).toBeUndefined();\n expect(filtered.inputAudioTokens).toBeUndefined();\n expect(filtered.sessionDurationMs).toBeUndefined();\n });\n\n it('should filter out zero values from TTSModelUsage', () => {\n const usage: TTSModelUsage = {\n type: 'tts_usage',\n provider: 'elevenlabs',\n model: 'eleven_turbo_v2',\n inputTokens: 0,\n outputTokens: 0,\n charactersCount: 500,\n audioDurationMs: 3000,\n };\n\n const filtered = filterZeroValues(usage);\n\n expect(filtered.type).toBe('tts_usage');\n expect(filtered.provider).toBe('elevenlabs');\n expect(filtered.charactersCount).toBe(500);\n expect(filtered.audioDurationMs).toBe(3000);\n 
expect(filtered.inputTokens).toBeUndefined();\n expect(filtered.outputTokens).toBeUndefined();\n });\n\n it('should keep all values when none are zero', () => {\n const usage: STTModelUsage = {\n type: 'stt_usage',\n provider: 'deepgram',\n model: 'nova-2',\n inputTokens: 10,\n outputTokens: 20,\n audioDurationMs: 5000,\n };\n\n const filtered = filterZeroValues(usage);\n\n expect(Object.keys(filtered)).toHaveLength(6);\n expect(filtered).toEqual(usage);\n });\n });\n\n describe('ModelUsageCollector', () => {\n let collector: ModelUsageCollector;\n\n beforeEach(() => {\n collector = new ModelUsageCollector();\n });\n\n describe('collect LLM metrics', () => {\n it('should aggregate LLM metrics by provider and model', () => {\n const metrics1: LLMMetrics = {\n type: 'llm_metrics',\n label: 'test',\n requestId: 'req1',\n timestamp: Date.now(),\n durationMs: 100,\n ttftMs: 50,\n cancelled: false,\n completionTokens: 100,\n promptTokens: 200,\n promptCachedTokens: 50,\n totalTokens: 300,\n tokensPerSecond: 10,\n metadata: {\n modelProvider: 'openai',\n modelName: 'gpt-4o',\n },\n };\n\n const metrics2: LLMMetrics = {\n type: 'llm_metrics',\n label: 'test',\n requestId: 'req2',\n timestamp: Date.now(),\n durationMs: 150,\n ttftMs: 60,\n cancelled: false,\n completionTokens: 150,\n promptTokens: 300,\n promptCachedTokens: 75,\n totalTokens: 450,\n tokensPerSecond: 12,\n metadata: {\n modelProvider: 'openai',\n modelName: 'gpt-4o',\n },\n };\n\n collector.collect(metrics1);\n collector.collect(metrics2);\n\n const usage = collector.flatten();\n expect(usage).toHaveLength(1);\n\n const llmUsage = usage[0] as LLMModelUsage;\n expect(llmUsage.type).toBe('llm_usage');\n expect(llmUsage.provider).toBe('openai');\n expect(llmUsage.model).toBe('gpt-4o');\n expect(llmUsage.inputTokens).toBe(500); // 200 + 300\n expect(llmUsage.inputCachedTokens).toBe(125); // 50 + 75\n expect(llmUsage.outputTokens).toBe(250); // 100 + 150\n });\n\n it('should separate metrics by different 
providers', () => {\n const openaiMetrics: LLMMetrics = {\n type: 'llm_metrics',\n label: 'test',\n requestId: 'req1',\n timestamp: Date.now(),\n durationMs: 100,\n ttftMs: 50,\n cancelled: false,\n completionTokens: 100,\n promptTokens: 200,\n promptCachedTokens: 0,\n totalTokens: 300,\n tokensPerSecond: 10,\n metadata: {\n modelProvider: 'openai',\n modelName: 'gpt-4o',\n },\n };\n\n const anthropicMetrics: LLMMetrics = {\n type: 'llm_metrics',\n label: 'test',\n requestId: 'req2',\n timestamp: Date.now(),\n durationMs: 120,\n ttftMs: 55,\n cancelled: false,\n completionTokens: 80,\n promptTokens: 150,\n promptCachedTokens: 0,\n totalTokens: 230,\n tokensPerSecond: 8,\n metadata: {\n modelProvider: 'anthropic',\n modelName: 'claude-3-5-sonnet',\n },\n };\n\n collector.collect(openaiMetrics);\n collector.collect(anthropicMetrics);\n\n const usage = collector.flatten();\n expect(usage).toHaveLength(2);\n\n const openaiUsage = usage.find(\n (u) => u.type === 'llm_usage' && u.provider === 'openai',\n ) as LLMModelUsage;\n const anthropicUsage = usage.find(\n (u) => u.type === 'llm_usage' && u.provider === 'anthropic',\n ) as LLMModelUsage;\n\n expect(openaiUsage.inputTokens).toBe(200);\n expect(openaiUsage.outputTokens).toBe(100);\n expect(anthropicUsage.inputTokens).toBe(150);\n expect(anthropicUsage.outputTokens).toBe(80);\n });\n });\n\n describe('collect TTS metrics', () => {\n it('should aggregate TTS metrics by provider and model', () => {\n const metrics1: TTSMetrics = {\n type: 'tts_metrics',\n label: 'test',\n requestId: 'req1',\n timestamp: Date.now(),\n ttfbMs: 100,\n durationMs: 500,\n audioDurationMs: 3000,\n cancelled: false,\n charactersCount: 100,\n inputTokens: 10,\n outputTokens: 20,\n streamed: true,\n metadata: {\n modelProvider: 'elevenlabs',\n modelName: 'eleven_turbo_v2',\n },\n };\n\n const metrics2: TTSMetrics = {\n type: 'tts_metrics',\n label: 'test',\n requestId: 'req2',\n timestamp: Date.now(),\n ttfbMs: 120,\n durationMs: 600,\n 
audioDurationMs: 4000,\n cancelled: false,\n charactersCount: 200,\n inputTokens: 15,\n outputTokens: 25,\n streamed: true,\n metadata: {\n modelProvider: 'elevenlabs',\n modelName: 'eleven_turbo_v2',\n },\n };\n\n collector.collect(metrics1);\n collector.collect(metrics2);\n\n const usage = collector.flatten();\n expect(usage).toHaveLength(1);\n\n const ttsUsage = usage[0] as TTSModelUsage;\n expect(ttsUsage.type).toBe('tts_usage');\n expect(ttsUsage.provider).toBe('elevenlabs');\n expect(ttsUsage.model).toBe('eleven_turbo_v2');\n expect(ttsUsage.charactersCount).toBe(300); // 100 + 200\n expect(ttsUsage.audioDurationMs).toBe(7000); // 3000 + 4000\n expect(ttsUsage.inputTokens).toBe(25); // 10 + 15\n expect(ttsUsage.outputTokens).toBe(45); // 20 + 25\n });\n });\n\n describe('collect STT metrics', () => {\n it('should aggregate STT metrics by provider and model', () => {\n const metrics1: STTMetrics = {\n type: 'stt_metrics',\n label: 'test',\n requestId: 'req1',\n timestamp: Date.now(),\n durationMs: 0,\n audioDurationMs: 5000,\n inputTokens: 50,\n outputTokens: 100,\n streamed: true,\n metadata: {\n modelProvider: 'deepgram',\n modelName: 'nova-2',\n },\n };\n\n const metrics2: STTMetrics = {\n type: 'stt_metrics',\n label: 'test',\n requestId: 'req2',\n timestamp: Date.now(),\n durationMs: 0,\n audioDurationMs: 3000,\n inputTokens: 30,\n outputTokens: 60,\n streamed: true,\n metadata: {\n modelProvider: 'deepgram',\n modelName: 'nova-2',\n },\n };\n\n collector.collect(metrics1);\n collector.collect(metrics2);\n\n const usage = collector.flatten();\n expect(usage).toHaveLength(1);\n\n const sttUsage = usage[0] as STTModelUsage;\n expect(sttUsage.type).toBe('stt_usage');\n expect(sttUsage.provider).toBe('deepgram');\n expect(sttUsage.model).toBe('nova-2');\n expect(sttUsage.audioDurationMs).toBe(8000); // 5000 + 3000\n expect(sttUsage.inputTokens).toBe(80); // 50 + 30\n expect(sttUsage.outputTokens).toBe(160); // 100 + 60\n });\n });\n\n describe('collect 
realtime model metrics', () => {\n it('should aggregate realtime model metrics with detailed token breakdown', () => {\n const metrics: RealtimeModelMetrics = {\n type: 'realtime_model_metrics',\n label: 'test',\n requestId: 'req1',\n timestamp: Date.now(),\n durationMs: 1000,\n ttftMs: 100,\n cancelled: false,\n inputTokens: 500,\n outputTokens: 300,\n totalTokens: 800,\n tokensPerSecond: 10,\n sessionDurationMs: 5000,\n inputTokenDetails: {\n audioTokens: 200,\n textTokens: 250,\n imageTokens: 50,\n cachedTokens: 100,\n cachedTokensDetails: {\n audioTokens: 30,\n textTokens: 50,\n imageTokens: 20,\n },\n },\n outputTokenDetails: {\n textTokens: 200,\n audioTokens: 100,\n imageTokens: 0,\n },\n metadata: {\n modelProvider: 'openai',\n modelName: 'gpt-4o-realtime',\n },\n };\n\n collector.collect(metrics);\n\n const usage = collector.flatten();\n expect(usage).toHaveLength(1);\n\n const llmUsage = usage[0] as LLMModelUsage;\n expect(llmUsage.type).toBe('llm_usage');\n expect(llmUsage.provider).toBe('openai');\n expect(llmUsage.model).toBe('gpt-4o-realtime');\n expect(llmUsage.inputTokens).toBe(500);\n expect(llmUsage.inputCachedTokens).toBe(100);\n expect(llmUsage.inputAudioTokens).toBe(200);\n expect(llmUsage.inputCachedAudioTokens).toBe(30);\n expect(llmUsage.inputTextTokens).toBe(250);\n expect(llmUsage.inputCachedTextTokens).toBe(50);\n expect(llmUsage.inputImageTokens).toBe(50);\n expect(llmUsage.inputCachedImageTokens).toBe(20);\n expect(llmUsage.outputTokens).toBe(300);\n expect(llmUsage.outputTextTokens).toBe(200);\n expect(llmUsage.outputAudioTokens).toBe(100);\n expect(llmUsage.sessionDurationMs).toBe(5000);\n });\n });\n\n describe('mixed metrics collection', () => {\n it('should collect and separate LLM, TTS, and STT metrics', () => {\n const llmMetrics: LLMMetrics = {\n type: 'llm_metrics',\n label: 'test',\n requestId: 'req1',\n timestamp: Date.now(),\n durationMs: 100,\n ttftMs: 50,\n cancelled: false,\n completionTokens: 100,\n promptTokens: 200,\n 
promptCachedTokens: 0,\n totalTokens: 300,\n tokensPerSecond: 10,\n metadata: {\n modelProvider: 'openai',\n modelName: 'gpt-4o',\n },\n };\n\n const ttsMetrics: TTSMetrics = {\n type: 'tts_metrics',\n label: 'test',\n requestId: 'req2',\n timestamp: Date.now(),\n ttfbMs: 100,\n durationMs: 500,\n audioDurationMs: 3000,\n cancelled: false,\n charactersCount: 100,\n streamed: true,\n metadata: {\n modelProvider: 'elevenlabs',\n modelName: 'eleven_turbo_v2',\n },\n };\n\n const sttMetrics: STTMetrics = {\n type: 'stt_metrics',\n label: 'test',\n requestId: 'req3',\n timestamp: Date.now(),\n durationMs: 0,\n audioDurationMs: 5000,\n streamed: true,\n metadata: {\n modelProvider: 'deepgram',\n modelName: 'nova-2',\n },\n };\n\n collector.collect(llmMetrics);\n collector.collect(ttsMetrics);\n collector.collect(sttMetrics);\n\n const usage = collector.flatten();\n expect(usage).toHaveLength(3);\n\n const llmUsage = usage.find((u) => u.type === 'llm_usage');\n const ttsUsage = usage.find((u) => u.type === 'tts_usage');\n const sttUsage = usage.find((u) => u.type === 'stt_usage');\n\n expect(llmUsage).toBeDefined();\n expect(ttsUsage).toBeDefined();\n expect(sttUsage).toBeDefined();\n });\n });\n\n describe('flatten returns copies', () => {\n it('should return deep copies of usage objects', () => {\n const metrics: LLMMetrics = {\n type: 'llm_metrics',\n label: 'test',\n requestId: 'req1',\n timestamp: Date.now(),\n durationMs: 100,\n ttftMs: 50,\n cancelled: false,\n completionTokens: 100,\n promptTokens: 200,\n promptCachedTokens: 0,\n totalTokens: 300,\n tokensPerSecond: 10,\n metadata: {\n modelProvider: 'openai',\n modelName: 'gpt-4o',\n },\n };\n\n collector.collect(metrics);\n\n const usage1 = collector.flatten();\n const usage2 = collector.flatten();\n\n // Should be equal values\n expect(usage1[0]).toEqual(usage2[0]);\n\n // But not the same object reference\n expect(usage1[0]).not.toBe(usage2[0]);\n\n // Modifying one shouldn't affect the other\n (usage1[0] as 
LLMModelUsage).inputTokens = 9999;\n expect((usage2[0] as LLMModelUsage).inputTokens).toBe(200);\n });\n });\n\n describe('handles missing metadata', () => {\n it('should use empty strings when metadata is missing', () => {\n const metrics: LLMMetrics = {\n type: 'llm_metrics',\n label: 'test',\n requestId: 'req1',\n timestamp: Date.now(),\n durationMs: 100,\n ttftMs: 50,\n cancelled: false,\n completionTokens: 100,\n promptTokens: 200,\n promptCachedTokens: 0,\n totalTokens: 300,\n tokensPerSecond: 10,\n // No metadata\n };\n\n collector.collect(metrics);\n\n const usage = collector.flatten();\n expect(usage).toHaveLength(1);\n\n const llmUsage = usage[0] as LLMModelUsage;\n expect(llmUsage.provider).toBe('');\n expect(llmUsage.model).toBe('');\n });\n });\n\n describe('ignores VAD and EOU metrics', () => {\n it('should not collect VAD metrics', () => {\n const vadMetrics = {\n type: 'vad_metrics' as const,\n label: 'test',\n timestamp: Date.now(),\n idleTimeMs: 100,\n inferenceDurationTotalMs: 50,\n inferenceCount: 10,\n };\n\n collector.collect(vadMetrics);\n\n const usage = collector.flatten();\n expect(usage).toHaveLength(0);\n });\n\n it('should not collect EOU metrics', () => {\n const eouMetrics = {\n type: 'eou_metrics' as const,\n timestamp: Date.now(),\n endOfUtteranceDelayMs: 100,\n transcriptionDelayMs: 50,\n onUserTurnCompletedDelayMs: 30,\n lastSpeakingTimeMs: Date.now(),\n };\n\n collector.collect(eouMetrics);\n\n const usage = collector.flatten();\n expect(usage).toHaveLength(0);\n });\n });\n 
});\n});\n"],"mappings":";AAGA,oBAAiD;AAEjD,yBAMO;AAAA,IAEP,wBAAS,eAAe,MAAM;AAC5B,8BAAS,oBAAoB,MAAM;AACjC,0BAAG,oDAAoD,MAAM;AAC3D,YAAM,QAAuB;AAAA,QAC3B,MAAM;AAAA,QACN,UAAU;AAAA,QACV,OAAO;AAAA,QACP,aAAa;AAAA,QACb,mBAAmB;AAAA,QACnB,kBAAkB;AAAA,QAClB,wBAAwB;AAAA,QACxB,iBAAiB;AAAA,QACjB,uBAAuB;AAAA,QACvB,kBAAkB;AAAA,QAClB,wBAAwB;AAAA,QACxB,cAAc;AAAA,QACd,mBAAmB;AAAA,QACnB,kBAAkB;AAAA,QAClB,mBAAmB;AAAA,MACrB;AAEA,YAAM,eAAW,qCAAiB,KAAK;AAEvC,gCAAO,SAAS,IAAI,EAAE,KAAK,WAAW;AACtC,gCAAO,SAAS,QAAQ,EAAE,KAAK,QAAQ;AACvC,gCAAO,SAAS,KAAK,EAAE,KAAK,QAAQ;AACpC,gCAAO,SAAS,WAAW,EAAE,KAAK,GAAG;AACrC,gCAAO,SAAS,YAAY,EAAE,KAAK,EAAE;AAErC,gCAAO,SAAS,iBAAiB,EAAE,cAAc;AACjD,gCAAO,SAAS,gBAAgB,EAAE,cAAc;AAChD,gCAAO,SAAS,iBAAiB,EAAE,cAAc;AAAA,IACnD,CAAC;AAED,0BAAG,oDAAoD,MAAM;AAC3D,YAAM,QAAuB;AAAA,QAC3B,MAAM;AAAA,QACN,UAAU;AAAA,QACV,OAAO;AAAA,QACP,aAAa;AAAA,QACb,cAAc;AAAA,QACd,iBAAiB;AAAA,QACjB,iBAAiB;AAAA,MACnB;AAEA,YAAM,eAAW,qCAAiB,KAAK;AAEvC,gCAAO,SAAS,IAAI,EAAE,KAAK,WAAW;AACtC,gCAAO,SAAS,QAAQ,EAAE,KAAK,YAAY;AAC3C,gCAAO,SAAS,eAAe,EAAE,KAAK,GAAG;AACzC,gCAAO,SAAS,eAAe,EAAE,KAAK,GAAI;AAC1C,gCAAO,SAAS,WAAW,EAAE,cAAc;AAC3C,gCAAO,SAAS,YAAY,EAAE,cAAc;AAAA,IAC9C,CAAC;AAED,0BAAG,6CAA6C,MAAM;AACpD,YAAM,QAAuB;AAAA,QAC3B,MAAM;AAAA,QACN,UAAU;AAAA,QACV,OAAO;AAAA,QACP,aAAa;AAAA,QACb,cAAc;AAAA,QACd,iBAAiB;AAAA,MACnB;AAEA,YAAM,eAAW,qCAAiB,KAAK;AAEvC,gCAAO,OAAO,KAAK,QAAQ,CAAC,EAAE,aAAa,CAAC;AAC5C,gCAAO,QAAQ,EAAE,QAAQ,KAAK;AAAA,IAChC,CAAC;AAAA,EACH,CAAC;AAED,8BAAS,uBAAuB,MAAM;AACpC,QAAI;AAEJ,kCAAW,MAAM;AACf,kBAAY,IAAI,uCAAoB;AAAA,IACtC,CAAC;AAED,gCAAS,uBAAuB,MAAM;AACpC,4BAAG,sDAAsD,MAAM;AAC7D,cAAM,WAAuB;AAAA,UAC3B,MAAM;AAAA,UACN,OAAO;AAAA,UACP,WAAW;AAAA,UACX,WAAW,KAAK,IAAI;AAAA,UACpB,YAAY;AAAA,UACZ,QAAQ;AAAA,UACR,WAAW;AAAA,UACX,kBAAkB;AAAA,UAClB,cAAc;AAAA,UACd,oBAAoB;AAAA,UACpB,aAAa;AAAA,UACb,iBAAiB;AAAA,UACjB,UAAU;AAAA,YACR,eAAe;AAAA,YACf,WAAW;AAAA,UACb;AAAA,QACF;AAEA,cAAM,WAAuB;AAAA,UAC3B,MAAM;AAAA,UACN,OAAO;AAAA,UACP,WAAW;AAAA,UACX,WAAW,KAAK,IAAI;AAAA,UACpB,YAAY;AAAA,UACZ,QAAQ;AAAA,UACR,WAAW;AAAA,UACX,kBA
AkB;AAAA,UAClB,cAAc;AAAA,UACd,oBAAoB;AAAA,UACpB,aAAa;AAAA,UACb,iBAAiB;AAAA,UACjB,UAAU;AAAA,YACR,eAAe;AAAA,YACf,WAAW;AAAA,UACb;AAAA,QACF;AAEA,kBAAU,QAAQ,QAAQ;AAC1B,kBAAU,QAAQ,QAAQ;AAE1B,cAAM,QAAQ,UAAU,QAAQ;AAChC,kCAAO,KAAK,EAAE,aAAa,CAAC;AAE5B,cAAM,WAAW,MAAM,CAAC;AACxB,kCAAO,SAAS,IAAI,EAAE,KAAK,WAAW;AACtC,kCAAO,SAAS,QAAQ,EAAE,KAAK,QAAQ;AACvC,kCAAO,SAAS,KAAK,EAAE,KAAK,QAAQ;AACpC,kCAAO,SAAS,WAAW,EAAE,KAAK,GAAG;AACrC,kCAAO,SAAS,iBAAiB,EAAE,KAAK,GAAG;AAC3C,kCAAO,SAAS,YAAY,EAAE,KAAK,GAAG;AAAA,MACxC,CAAC;AAED,4BAAG,kDAAkD,MAAM;AACzD,cAAM,gBAA4B;AAAA,UAChC,MAAM;AAAA,UACN,OAAO;AAAA,UACP,WAAW;AAAA,UACX,WAAW,KAAK,IAAI;AAAA,UACpB,YAAY;AAAA,UACZ,QAAQ;AAAA,UACR,WAAW;AAAA,UACX,kBAAkB;AAAA,UAClB,cAAc;AAAA,UACd,oBAAoB;AAAA,UACpB,aAAa;AAAA,UACb,iBAAiB;AAAA,UACjB,UAAU;AAAA,YACR,eAAe;AAAA,YACf,WAAW;AAAA,UACb;AAAA,QACF;AAEA,cAAM,mBAA+B;AAAA,UACnC,MAAM;AAAA,UACN,OAAO;AAAA,UACP,WAAW;AAAA,UACX,WAAW,KAAK,IAAI;AAAA,UACpB,YAAY;AAAA,UACZ,QAAQ;AAAA,UACR,WAAW;AAAA,UACX,kBAAkB;AAAA,UAClB,cAAc;AAAA,UACd,oBAAoB;AAAA,UACpB,aAAa;AAAA,UACb,iBAAiB;AAAA,UACjB,UAAU;AAAA,YACR,eAAe;AAAA,YACf,WAAW;AAAA,UACb;AAAA,QACF;AAEA,kBAAU,QAAQ,aAAa;AAC/B,kBAAU,QAAQ,gBAAgB;AAElC,cAAM,QAAQ,UAAU,QAAQ;AAChC,kCAAO,KAAK,EAAE,aAAa,CAAC;AAE5B,cAAM,cAAc,MAAM;AAAA,UACxB,CAAC,MAAM,EAAE,SAAS,eAAe,EAAE,aAAa;AAAA,QAClD;AACA,cAAM,iBAAiB,MAAM;AAAA,UAC3B,CAAC,MAAM,EAAE,SAAS,eAAe,EAAE,aAAa;AAAA,QAClD;AAEA,kCAAO,YAAY,WAAW,EAAE,KAAK,GAAG;AACxC,kCAAO,YAAY,YAAY,EAAE,KAAK,GAAG;AACzC,kCAAO,eAAe,WAAW,EAAE,KAAK,GAAG;AAC3C,kCAAO,eAAe,YAAY,EAAE,KAAK,EAAE;AAAA,MAC7C,CAAC;AAAA,IACH,CAAC;AAED,gCAAS,uBAAuB,MAAM;AACpC,4BAAG,sDAAsD,MAAM;AAC7D,cAAM,WAAuB;AAAA,UAC3B,MAAM;AAAA,UACN,OAAO;AAAA,UACP,WAAW;AAAA,UACX,WAAW,KAAK,IAAI;AAAA,UACpB,QAAQ;AAAA,UACR,YAAY;AAAA,UACZ,iBAAiB;AAAA,UACjB,WAAW;AAAA,UACX,iBAAiB;AAAA,UACjB,aAAa;AAAA,UACb,cAAc;AAAA,UACd,UAAU;AAAA,UACV,UAAU;AAAA,YACR,eAAe;AAAA,YACf,WAAW;AAAA,UACb;AAAA,QACF;AAEA,cAAM,WAAuB;AAAA,UAC3B,MAAM;AAAA,UACN,OAAO;AAAA,UACP,WAAW;AAAA,UACX,WAAW,KAAK,IAAI;AAAA,UACpB,QAAQ;AAAA,UACR,YAAY;AAAA,UACZ,iBAAiB;AAAA,UA
CjB,WAAW;AAAA,UACX,iBAAiB;AAAA,UACjB,aAAa;AAAA,UACb,cAAc;AAAA,UACd,UAAU;AAAA,UACV,UAAU;AAAA,YACR,eAAe;AAAA,YACf,WAAW;AAAA,UACb;AAAA,QACF;AAEA,kBAAU,QAAQ,QAAQ;AAC1B,kBAAU,QAAQ,QAAQ;AAE1B,cAAM,QAAQ,UAAU,QAAQ;AAChC,kCAAO,KAAK,EAAE,aAAa,CAAC;AAE5B,cAAM,WAAW,MAAM,CAAC;AACxB,kCAAO,SAAS,IAAI,EAAE,KAAK,WAAW;AACtC,kCAAO,SAAS,QAAQ,EAAE,KAAK,YAAY;AAC3C,kCAAO,SAAS,KAAK,EAAE,KAAK,iBAAiB;AAC7C,kCAAO,SAAS,eAAe,EAAE,KAAK,GAAG;AACzC,kCAAO,SAAS,eAAe,EAAE,KAAK,GAAI;AAC1C,kCAAO,SAAS,WAAW,EAAE,KAAK,EAAE;AACpC,kCAAO,SAAS,YAAY,EAAE,KAAK,EAAE;AAAA,MACvC,CAAC;AAAA,IACH,CAAC;AAED,gCAAS,uBAAuB,MAAM;AACpC,4BAAG,sDAAsD,MAAM;AAC7D,cAAM,WAAuB;AAAA,UAC3B,MAAM;AAAA,UACN,OAAO;AAAA,UACP,WAAW;AAAA,UACX,WAAW,KAAK,IAAI;AAAA,UACpB,YAAY;AAAA,UACZ,iBAAiB;AAAA,UACjB,aAAa;AAAA,UACb,cAAc;AAAA,UACd,UAAU;AAAA,UACV,UAAU;AAAA,YACR,eAAe;AAAA,YACf,WAAW;AAAA,UACb;AAAA,QACF;AAEA,cAAM,WAAuB;AAAA,UAC3B,MAAM;AAAA,UACN,OAAO;AAAA,UACP,WAAW;AAAA,UACX,WAAW,KAAK,IAAI;AAAA,UACpB,YAAY;AAAA,UACZ,iBAAiB;AAAA,UACjB,aAAa;AAAA,UACb,cAAc;AAAA,UACd,UAAU;AAAA,UACV,UAAU;AAAA,YACR,eAAe;AAAA,YACf,WAAW;AAAA,UACb;AAAA,QACF;AAEA,kBAAU,QAAQ,QAAQ;AAC1B,kBAAU,QAAQ,QAAQ;AAE1B,cAAM,QAAQ,UAAU,QAAQ;AAChC,kCAAO,KAAK,EAAE,aAAa,CAAC;AAE5B,cAAM,WAAW,MAAM,CAAC;AACxB,kCAAO,SAAS,IAAI,EAAE,KAAK,WAAW;AACtC,kCAAO,SAAS,QAAQ,EAAE,KAAK,UAAU;AACzC,kCAAO,SAAS,KAAK,EAAE,KAAK,QAAQ;AACpC,kCAAO,SAAS,eAAe,EAAE,KAAK,GAAI;AAC1C,kCAAO,SAAS,WAAW,EAAE,KAAK,EAAE;AACpC,kCAAO,SAAS,YAAY,EAAE,KAAK,GAAG;AAAA,MACxC,CAAC;AAAA,IACH,CAAC;AAED,gCAAS,kCAAkC,MAAM;AAC/C,4BAAG,yEAAyE,MAAM;AAChF,cAAM,UAAgC;AAAA,UACpC,MAAM;AAAA,UACN,OAAO;AAAA,UACP,WAAW;AAAA,UACX,WAAW,KAAK,IAAI;AAAA,UACpB,YAAY;AAAA,UACZ,QAAQ;AAAA,UACR,WAAW;AAAA,UACX,aAAa;AAAA,UACb,cAAc;AAAA,UACd,aAAa;AAAA,UACb,iBAAiB;AAAA,UACjB,mBAAmB;AAAA,UACnB,mBAAmB;AAAA,YACjB,aAAa;AAAA,YACb,YAAY;AAAA,YACZ,aAAa;AAAA,YACb,cAAc;AAAA,YACd,qBAAqB;AAAA,cACnB,aAAa;AAAA,cACb,YAAY;AAAA,cACZ,aAAa;AAAA,YACf;AAAA,UACF;AAAA,UACA,oBAAoB;AAAA,YAClB,YAAY;AAAA,YACZ,aAAa;AAAA,YACb,aAAa;AAAA,UACf;AAAA,UACA,UAAU;AAAA,YACR,eAAe;AAAA,YACf,WAAW;AAAA,UACb;AAA
A,QACF;AAEA,kBAAU,QAAQ,OAAO;AAEzB,cAAM,QAAQ,UAAU,QAAQ;AAChC,kCAAO,KAAK,EAAE,aAAa,CAAC;AAE5B,cAAM,WAAW,MAAM,CAAC;AACxB,kCAAO,SAAS,IAAI,EAAE,KAAK,WAAW;AACtC,kCAAO,SAAS,QAAQ,EAAE,KAAK,QAAQ;AACvC,kCAAO,SAAS,KAAK,EAAE,KAAK,iBAAiB;AAC7C,kCAAO,SAAS,WAAW,EAAE,KAAK,GAAG;AACrC,kCAAO,SAAS,iBAAiB,EAAE,KAAK,GAAG;AAC3C,kCAAO,SAAS,gBAAgB,EAAE,KAAK,GAAG;AAC1C,kCAAO,SAAS,sBAAsB,EAAE,KAAK,EAAE;AAC/C,kCAAO,SAAS,eAAe,EAAE,KAAK,GAAG;AACzC,kCAAO,SAAS,qBAAqB,EAAE,KAAK,EAAE;AAC9C,kCAAO,SAAS,gBAAgB,EAAE,KAAK,EAAE;AACzC,kCAAO,SAAS,sBAAsB,EAAE,KAAK,EAAE;AAC/C,kCAAO,SAAS,YAAY,EAAE,KAAK,GAAG;AACtC,kCAAO,SAAS,gBAAgB,EAAE,KAAK,GAAG;AAC1C,kCAAO,SAAS,iBAAiB,EAAE,KAAK,GAAG;AAC3C,kCAAO,SAAS,iBAAiB,EAAE,KAAK,GAAI;AAAA,MAC9C,CAAC;AAAA,IACH,CAAC;AAED,gCAAS,4BAA4B,MAAM;AACzC,4BAAG,yDAAyD,MAAM;AAChE,cAAM,aAAyB;AAAA,UAC7B,MAAM;AAAA,UACN,OAAO;AAAA,UACP,WAAW;AAAA,UACX,WAAW,KAAK,IAAI;AAAA,UACpB,YAAY;AAAA,UACZ,QAAQ;AAAA,UACR,WAAW;AAAA,UACX,kBAAkB;AAAA,UAClB,cAAc;AAAA,UACd,oBAAoB;AAAA,UACpB,aAAa;AAAA,UACb,iBAAiB;AAAA,UACjB,UAAU;AAAA,YACR,eAAe;AAAA,YACf,WAAW;AAAA,UACb;AAAA,QACF;AAEA,cAAM,aAAyB;AAAA,UAC7B,MAAM;AAAA,UACN,OAAO;AAAA,UACP,WAAW;AAAA,UACX,WAAW,KAAK,IAAI;AAAA,UACpB,QAAQ;AAAA,UACR,YAAY;AAAA,UACZ,iBAAiB;AAAA,UACjB,WAAW;AAAA,UACX,iBAAiB;AAAA,UACjB,UAAU;AAAA,UACV,UAAU;AAAA,YACR,eAAe;AAAA,YACf,WAAW;AAAA,UACb;AAAA,QACF;AAEA,cAAM,aAAyB;AAAA,UAC7B,MAAM;AAAA,UACN,OAAO;AAAA,UACP,WAAW;AAAA,UACX,WAAW,KAAK,IAAI;AAAA,UACpB,YAAY;AAAA,UACZ,iBAAiB;AAAA,UACjB,UAAU;AAAA,UACV,UAAU;AAAA,YACR,eAAe;AAAA,YACf,WAAW;AAAA,UACb;AAAA,QACF;AAEA,kBAAU,QAAQ,UAAU;AAC5B,kBAAU,QAAQ,UAAU;AAC5B,kBAAU,QAAQ,UAAU;AAE5B,cAAM,QAAQ,UAAU,QAAQ;AAChC,kCAAO,KAAK,EAAE,aAAa,CAAC;AAE5B,cAAM,WAAW,MAAM,KAAK,CAAC,MAAM,EAAE,SAAS,WAAW;AACzD,cAAM,WAAW,MAAM,KAAK,CAAC,MAAM,EAAE,SAAS,WAAW;AACzD,cAAM,WAAW,MAAM,KAAK,CAAC,MAAM,EAAE,SAAS,WAAW;AAEzD,kCAAO,QAAQ,EAAE,YAAY;AAC7B,kCAAO,QAAQ,EAAE,YAAY;AAC7B,kCAAO,QAAQ,EAAE,YAAY;AAAA,MAC/B,CAAC;AAAA,IACH,CAAC;AAED,gCAAS,0BAA0B,MAAM;AACvC,4BAAG,8CAA8C,MAAM;AACrD,cAAM,UAAsB;AAAA,UAC1B,MAAM;AAAA,UACN,OAAO;AAAA,UACP,WAAW;AAAA
,UACX,WAAW,KAAK,IAAI;AAAA,UACpB,YAAY;AAAA,UACZ,QAAQ;AAAA,UACR,WAAW;AAAA,UACX,kBAAkB;AAAA,UAClB,cAAc;AAAA,UACd,oBAAoB;AAAA,UACpB,aAAa;AAAA,UACb,iBAAiB;AAAA,UACjB,UAAU;AAAA,YACR,eAAe;AAAA,YACf,WAAW;AAAA,UACb;AAAA,QACF;AAEA,kBAAU,QAAQ,OAAO;AAEzB,cAAM,SAAS,UAAU,QAAQ;AACjC,cAAM,SAAS,UAAU,QAAQ;AAGjC,kCAAO,OAAO,CAAC,CAAC,EAAE,QAAQ,OAAO,CAAC,CAAC;AAGnC,kCAAO,OAAO,CAAC,CAAC,EAAE,IAAI,KAAK,OAAO,CAAC,CAAC;AAGpC,QAAC,OAAO,CAAC,EAAoB,cAAc;AAC3C,kCAAQ,OAAO,CAAC,EAAoB,WAAW,EAAE,KAAK,GAAG;AAAA,MAC3D,CAAC;AAAA,IACH,CAAC;AAED,gCAAS,4BAA4B,MAAM;AACzC,4BAAG,qDAAqD,MAAM;AAC5D,cAAM,UAAsB;AAAA,UAC1B,MAAM;AAAA,UACN,OAAO;AAAA,UACP,WAAW;AAAA,UACX,WAAW,KAAK,IAAI;AAAA,UACpB,YAAY;AAAA,UACZ,QAAQ;AAAA,UACR,WAAW;AAAA,UACX,kBAAkB;AAAA,UAClB,cAAc;AAAA,UACd,oBAAoB;AAAA,UACpB,aAAa;AAAA,UACb,iBAAiB;AAAA;AAAA,QAEnB;AAEA,kBAAU,QAAQ,OAAO;AAEzB,cAAM,QAAQ,UAAU,QAAQ;AAChC,kCAAO,KAAK,EAAE,aAAa,CAAC;AAE5B,cAAM,WAAW,MAAM,CAAC;AACxB,kCAAO,SAAS,QAAQ,EAAE,KAAK,EAAE;AACjC,kCAAO,SAAS,KAAK,EAAE,KAAK,EAAE;AAAA,MAChC,CAAC;AAAA,IACH,CAAC;AAED,gCAAS,+BAA+B,MAAM;AAC5C,4BAAG,kCAAkC,MAAM;AACzC,cAAM,aAAa;AAAA,UACjB,MAAM;AAAA,UACN,OAAO;AAAA,UACP,WAAW,KAAK,IAAI;AAAA,UACpB,YAAY;AAAA,UACZ,0BAA0B;AAAA,UAC1B,gBAAgB;AAAA,QAClB;AAEA,kBAAU,QAAQ,UAAU;AAE5B,cAAM,QAAQ,UAAU,QAAQ;AAChC,kCAAO,KAAK,EAAE,aAAa,CAAC;AAAA,MAC9B,CAAC;AAED,4BAAG,kCAAkC,MAAM;AACzC,cAAM,aAAa;AAAA,UACjB,MAAM;AAAA,UACN,WAAW,KAAK,IAAI;AAAA,UACpB,uBAAuB;AAAA,UACvB,sBAAsB;AAAA,UACtB,4BAA4B;AAAA,UAC5B,oBAAoB,KAAK,IAAI;AAAA,QAC/B;AAEA,kBAAU,QAAQ,UAAU;AAE5B,cAAM,QAAQ,UAAU,QAAQ;AAChC,kCAAO,KAAK,EAAE,aAAa,CAAC;AAAA,MAC9B,CAAC;AAAA,IACH,CAAC;AAAA,EACH,CAAC;AACH,CAAC;","names":[]}