@livekit/agents 1.0.39 → 1.0.41

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (154)
  1. package/dist/cli.cjs +20 -18
  2. package/dist/cli.cjs.map +1 -1
  3. package/dist/cli.d.ts.map +1 -1
  4. package/dist/cli.js +20 -18
  5. package/dist/cli.js.map +1 -1
  6. package/dist/http_server.cjs +9 -6
  7. package/dist/http_server.cjs.map +1 -1
  8. package/dist/http_server.d.cts +5 -1
  9. package/dist/http_server.d.ts +5 -1
  10. package/dist/http_server.d.ts.map +1 -1
  11. package/dist/http_server.js +9 -6
  12. package/dist/http_server.js.map +1 -1
  13. package/dist/index.cjs +5 -0
  14. package/dist/index.cjs.map +1 -1
  15. package/dist/index.d.cts +1 -0
  16. package/dist/index.d.ts +1 -0
  17. package/dist/index.d.ts.map +1 -1
  18. package/dist/index.js +3 -0
  19. package/dist/index.js.map +1 -1
  20. package/dist/inference/stt.cjs +2 -1
  21. package/dist/inference/stt.cjs.map +1 -1
  22. package/dist/inference/stt.d.ts.map +1 -1
  23. package/dist/inference/stt.js +2 -1
  24. package/dist/inference/stt.js.map +1 -1
  25. package/dist/ipc/supervised_proc.cjs +4 -0
  26. package/dist/ipc/supervised_proc.cjs.map +1 -1
  27. package/dist/ipc/supervised_proc.d.cts +1 -0
  28. package/dist/ipc/supervised_proc.d.ts +1 -0
  29. package/dist/ipc/supervised_proc.d.ts.map +1 -1
  30. package/dist/ipc/supervised_proc.js +4 -0
  31. package/dist/ipc/supervised_proc.js.map +1 -1
  32. package/dist/llm/realtime.cjs.map +1 -1
  33. package/dist/llm/realtime.d.cts +5 -1
  34. package/dist/llm/realtime.d.ts +5 -1
  35. package/dist/llm/realtime.d.ts.map +1 -1
  36. package/dist/llm/realtime.js.map +1 -1
  37. package/dist/tokenize/basic/sentence.cjs +3 -3
  38. package/dist/tokenize/basic/sentence.cjs.map +1 -1
  39. package/dist/tokenize/basic/sentence.js +3 -3
  40. package/dist/tokenize/basic/sentence.js.map +1 -1
  41. package/dist/tokenize/tokenizer.test.cjs +3 -1
  42. package/dist/tokenize/tokenizer.test.cjs.map +1 -1
  43. package/dist/tokenize/tokenizer.test.js +3 -1
  44. package/dist/tokenize/tokenizer.test.js.map +1 -1
  45. package/dist/tts/stream_adapter.cjs +15 -1
  46. package/dist/tts/stream_adapter.cjs.map +1 -1
  47. package/dist/tts/stream_adapter.d.ts.map +1 -1
  48. package/dist/tts/stream_adapter.js +15 -1
  49. package/dist/tts/stream_adapter.js.map +1 -1
  50. package/dist/tts/tts.cjs.map +1 -1
  51. package/dist/tts/tts.d.cts +9 -1
  52. package/dist/tts/tts.d.ts +9 -1
  53. package/dist/tts/tts.d.ts.map +1 -1
  54. package/dist/tts/tts.js.map +1 -1
  55. package/dist/types.cjs +3 -0
  56. package/dist/types.cjs.map +1 -1
  57. package/dist/types.d.cts +4 -0
  58. package/dist/types.d.ts +4 -0
  59. package/dist/types.d.ts.map +1 -1
  60. package/dist/types.js +2 -0
  61. package/dist/types.js.map +1 -1
  62. package/dist/voice/agent.cjs +11 -1
  63. package/dist/voice/agent.cjs.map +1 -1
  64. package/dist/voice/agent.d.cts +7 -3
  65. package/dist/voice/agent.d.ts +7 -3
  66. package/dist/voice/agent.d.ts.map +1 -1
  67. package/dist/voice/agent.js +11 -1
  68. package/dist/voice/agent.js.map +1 -1
  69. package/dist/voice/agent_activity.cjs +30 -14
  70. package/dist/voice/agent_activity.cjs.map +1 -1
  71. package/dist/voice/agent_activity.d.cts +1 -0
  72. package/dist/voice/agent_activity.d.ts +1 -0
  73. package/dist/voice/agent_activity.d.ts.map +1 -1
  74. package/dist/voice/agent_activity.js +30 -14
  75. package/dist/voice/agent_activity.js.map +1 -1
  76. package/dist/voice/agent_session.cjs +5 -1
  77. package/dist/voice/agent_session.cjs.map +1 -1
  78. package/dist/voice/agent_session.d.cts +2 -0
  79. package/dist/voice/agent_session.d.ts +2 -0
  80. package/dist/voice/agent_session.d.ts.map +1 -1
  81. package/dist/voice/agent_session.js +5 -1
  82. package/dist/voice/agent_session.js.map +1 -1
  83. package/dist/voice/background_audio.cjs +2 -1
  84. package/dist/voice/background_audio.cjs.map +1 -1
  85. package/dist/voice/background_audio.d.cts +4 -2
  86. package/dist/voice/background_audio.d.ts +4 -2
  87. package/dist/voice/background_audio.d.ts.map +1 -1
  88. package/dist/voice/background_audio.js +2 -1
  89. package/dist/voice/background_audio.js.map +1 -1
  90. package/dist/voice/generation.cjs +58 -5
  91. package/dist/voice/generation.cjs.map +1 -1
  92. package/dist/voice/generation.d.cts +17 -3
  93. package/dist/voice/generation.d.ts +17 -3
  94. package/dist/voice/generation.d.ts.map +1 -1
  95. package/dist/voice/generation.js +63 -6
  96. package/dist/voice/generation.js.map +1 -1
  97. package/dist/voice/index.cjs.map +1 -1
  98. package/dist/voice/index.d.cts +1 -1
  99. package/dist/voice/index.d.ts +1 -1
  100. package/dist/voice/index.d.ts.map +1 -1
  101. package/dist/voice/index.js.map +1 -1
  102. package/dist/voice/io.cjs +22 -2
  103. package/dist/voice/io.cjs.map +1 -1
  104. package/dist/voice/io.d.cts +21 -5
  105. package/dist/voice/io.d.ts +21 -5
  106. package/dist/voice/io.d.ts.map +1 -1
  107. package/dist/voice/io.js +18 -1
  108. package/dist/voice/io.js.map +1 -1
  109. package/dist/voice/room_io/_output.cjs +3 -2
  110. package/dist/voice/room_io/_output.cjs.map +1 -1
  111. package/dist/voice/room_io/_output.d.cts +3 -3
  112. package/dist/voice/room_io/_output.d.ts +3 -3
  113. package/dist/voice/room_io/_output.d.ts.map +1 -1
  114. package/dist/voice/room_io/_output.js +4 -3
  115. package/dist/voice/room_io/_output.js.map +1 -1
  116. package/dist/voice/transcription/synchronizer.cjs +137 -13
  117. package/dist/voice/transcription/synchronizer.cjs.map +1 -1
  118. package/dist/voice/transcription/synchronizer.d.cts +34 -4
  119. package/dist/voice/transcription/synchronizer.d.ts +34 -4
  120. package/dist/voice/transcription/synchronizer.d.ts.map +1 -1
  121. package/dist/voice/transcription/synchronizer.js +141 -14
  122. package/dist/voice/transcription/synchronizer.js.map +1 -1
  123. package/dist/voice/transcription/synchronizer.test.cjs +151 -0
  124. package/dist/voice/transcription/synchronizer.test.cjs.map +1 -0
  125. package/dist/voice/transcription/synchronizer.test.js +150 -0
  126. package/dist/voice/transcription/synchronizer.test.js.map +1 -0
  127. package/dist/worker.cjs +12 -2
  128. package/dist/worker.cjs.map +1 -1
  129. package/dist/worker.d.ts.map +1 -1
  130. package/dist/worker.js +12 -2
  131. package/dist/worker.js.map +1 -1
  132. package/package.json +1 -1
  133. package/src/cli.ts +20 -18
  134. package/src/http_server.ts +18 -6
  135. package/src/index.ts +1 -0
  136. package/src/inference/stt.ts +9 -8
  137. package/src/ipc/supervised_proc.ts +4 -0
  138. package/src/llm/realtime.ts +5 -1
  139. package/src/tokenize/basic/sentence.ts +3 -3
  140. package/src/tokenize/tokenizer.test.ts +4 -0
  141. package/src/tts/stream_adapter.ts +23 -1
  142. package/src/tts/tts.ts +10 -1
  143. package/src/types.ts +5 -0
  144. package/src/voice/agent.ts +19 -4
  145. package/src/voice/agent_activity.ts +38 -13
  146. package/src/voice/agent_session.ts +6 -0
  147. package/src/voice/background_audio.ts +6 -3
  148. package/src/voice/generation.ts +115 -10
  149. package/src/voice/index.ts +1 -1
  150. package/src/voice/io.ts +40 -5
  151. package/src/voice/room_io/_output.ts +6 -5
  152. package/src/voice/transcription/synchronizer.test.ts +206 -0
  153. package/src/voice/transcription/synchronizer.ts +202 -17
  154. package/src/worker.ts +24 -2
@@ -27,7 +27,7 @@ const splitSentences = (text, minLength = 20, retainFormat = false) => {
27
27
  const suffixes = /(Inc|Ltd|Jr|Sr|Co)/g;
28
28
  const starters = /(Mr|Mrs|Ms|Dr|Prof|Capt|Cpt|Lt|He\s|She\s|It\s|They\s|Their\s|Our\s|We\s|But\s|However\s|That\s|This\s|Wherever)/g;
29
29
  const acronyms = /([A-Z][.][A-Z][.](?:[A-Z][.])?)/g;
30
- const websites = /[.](com|net|org|io|gov|edu|me)/g;
30
+ const websites = /(\w+\.)+(com|net|org|io|gov|edu|me)/g;
31
31
  const digits = /([0-9])/g;
32
32
  const dots = /\.{2,}/g;
33
33
  if (retainFormat) {
@@ -36,7 +36,7 @@ const splitSentences = (text, minLength = 20, retainFormat = false) => {
36
36
  text = text.replaceAll("\n", " ");
37
37
  }
38
38
  text = text.replaceAll(prefixes, "$1<prd>");
39
- text = text.replaceAll(websites, "<prd>$2");
39
+ text = text.replace(websites, (match) => match.replaceAll(".", "<prd>"));
40
40
  text = text.replaceAll(new RegExp(`${digits.source}[.]${digits.source}`, "g"), "$1<prd>$2");
41
41
  text = text.replaceAll(dots, (match) => "<prd>".repeat(match.length));
42
42
  text = text.replaceAll("Ph.D.", "Ph<prd>D<prd>");
@@ -60,7 +60,7 @@ const splitSentences = (text, minLength = 20, retainFormat = false) => {
60
60
  text = text.replaceAll('."', '".');
61
61
  text = text.replaceAll('!"', '"!');
62
62
  text = text.replaceAll('?"', '"?');
63
- text = text.replaceAll(".", ".<stop>");
63
+ text = text.replace(/\.(?=\s|$)/g, ".<stop>");
64
64
  text = text.replaceAll("?", "?<stop>");
65
65
  text = text.replaceAll("!", "!<stop>");
66
66
  text = text.replaceAll("<prd>", ".");
@@ -1 +1 @@
1
- {"version":3,"sources":["../../../src/tokenize/basic/sentence.ts"],"sourcesContent":["// SPDX-FileCopyrightText: 2024 LiveKit, Inc.\n//\n// SPDX-License-Identifier: Apache-2.0\n\n/**\n * Split the text into sentences.\n */\nexport const splitSentences = (\n text: string,\n minLength = 20,\n retainFormat: boolean = false,\n): [string, number, number][] => {\n const alphabets = /([A-Za-z])/g;\n const prefixes = /(Mr|St|Mrs|Ms|Dr)[.]/g;\n const suffixes = /(Inc|Ltd|Jr|Sr|Co)/g;\n const starters =\n /(Mr|Mrs|Ms|Dr|Prof|Capt|Cpt|Lt|He\\s|She\\s|It\\s|They\\s|Their\\s|Our\\s|We\\s|But\\s|However\\s|That\\s|This\\s|Wherever)/g;\n const acronyms = /([A-Z][.][A-Z][.](?:[A-Z][.])?)/g;\n const websites = /[.](com|net|org|io|gov|edu|me)/g;\n const digits = /([0-9])/g;\n const dots = /\\.{2,}/g;\n\n if (retainFormat) {\n text = text.replaceAll('\\n', '<nel><stop>');\n } else {\n text = text.replaceAll('\\n', ' ');\n }\n\n text = text.replaceAll(prefixes, '$1<prd>');\n text = text.replaceAll(websites, '<prd>$2');\n text = text.replaceAll(new RegExp(`${digits.source}[.]${digits.source}`, 'g'), '$1<prd>$2');\n text = text.replaceAll(dots, (match) => '<prd>'.repeat(match.length));\n text = text.replaceAll('Ph.D.', 'Ph<prd>D<prd>');\n text = text.replaceAll(new RegExp(`\\\\s${alphabets.source}[.] `, 'g'), ' $1<prd> ');\n text = text.replaceAll(new RegExp(`${acronyms.source} ${starters.source}`, 'g'), '$1<stop> $2');\n text = text.replaceAll(\n new RegExp(`${alphabets.source}[.]${alphabets.source}[.]${alphabets.source}[.]`, 'g'),\n '$1<prd>$2<prd>$3<prd>',\n );\n text = text.replaceAll(\n new RegExp(`${alphabets.source}[.]${alphabets.source}[.]`, 'g'),\n '$1<prd>$2<prd>',\n );\n text = text.replaceAll(\n new RegExp(` ${suffixes.source}[.] 
${starters.source}`, 'g'),\n '$1<stop> $2',\n );\n text = text.replaceAll(new RegExp(` ${suffixes.source}[.]`, 'g'), '$1<prd>');\n text = text.replaceAll(new RegExp(` ${alphabets.source}[.]`, 'g'), '$1<prd>');\n text = text.replaceAll('.”', '”.');\n text = text.replaceAll('.\"', '\".');\n text = text.replaceAll('!\"', '\"!');\n text = text.replaceAll('?\"', '\"?');\n text = text.replaceAll('.', '.<stop>');\n text = text.replaceAll('?', '?<stop>');\n text = text.replaceAll('!', '!<stop>');\n text = text.replaceAll('<prd>', '.');\n\n if (retainFormat) {\n text = text.replaceAll('<nel>', '\\n');\n }\n\n const split = text.split('<stop>');\n text = text.replaceAll('<stop>', '');\n\n const sentences: [string, number, number][] = [];\n let buf = '';\n let start = 0;\n let end = 0;\n const prePad = retainFormat ? '' : ' ';\n for (const match of split) {\n const sentence = retainFormat ? match : match.trim();\n if (!sentence) continue;\n\n buf += prePad + sentence;\n end += match.length;\n if (buf.length > minLength) {\n sentences.push([buf.slice(prePad.length), start, end]);\n start = end;\n buf = '';\n }\n }\n\n if (buf) {\n sentences.push([buf.slice(prePad.length), start, text.length - 1]);\n }\n\n return 
sentences;\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAOO,MAAM,iBAAiB,CAC5B,MACA,YAAY,IACZ,eAAwB,UACO;AAC/B,QAAM,YAAY;AAClB,QAAM,WAAW;AACjB,QAAM,WAAW;AACjB,QAAM,WACJ;AACF,QAAM,WAAW;AACjB,QAAM,WAAW;AACjB,QAAM,SAAS;AACf,QAAM,OAAO;AAEb,MAAI,cAAc;AAChB,WAAO,KAAK,WAAW,MAAM,aAAa;AAAA,EAC5C,OAAO;AACL,WAAO,KAAK,WAAW,MAAM,GAAG;AAAA,EAClC;AAEA,SAAO,KAAK,WAAW,UAAU,SAAS;AAC1C,SAAO,KAAK,WAAW,UAAU,SAAS;AAC1C,SAAO,KAAK,WAAW,IAAI,OAAO,GAAG,OAAO,MAAM,MAAM,OAAO,MAAM,IAAI,GAAG,GAAG,WAAW;AAC1F,SAAO,KAAK,WAAW,MAAM,CAAC,UAAU,QAAQ,OAAO,MAAM,MAAM,CAAC;AACpE,SAAO,KAAK,WAAW,SAAS,eAAe;AAC/C,SAAO,KAAK,WAAW,IAAI,OAAO,MAAM,UAAU,MAAM,QAAQ,GAAG,GAAG,WAAW;AACjF,SAAO,KAAK,WAAW,IAAI,OAAO,GAAG,SAAS,MAAM,IAAI,SAAS,MAAM,IAAI,GAAG,GAAG,aAAa;AAC9F,SAAO,KAAK;AAAA,IACV,IAAI,OAAO,GAAG,UAAU,MAAM,MAAM,UAAU,MAAM,MAAM,UAAU,MAAM,OAAO,GAAG;AAAA,IACpF;AAAA,EACF;AACA,SAAO,KAAK;AAAA,IACV,IAAI,OAAO,GAAG,UAAU,MAAM,MAAM,UAAU,MAAM,OAAO,GAAG;AAAA,IAC9D;AAAA,EACF;AACA,SAAO,KAAK;AAAA,IACV,IAAI,OAAO,IAAI,SAAS,MAAM,OAAO,SAAS,MAAM,IAAI,GAAG;AAAA,IAC3D;AAAA,EACF;AACA,SAAO,KAAK,WAAW,IAAI,OAAO,IAAI,SAAS,MAAM,OAAO,GAAG,GAAG,SAAS;AAC3E,SAAO,KAAK,WAAW,IAAI,OAAO,IAAI,UAAU,MAAM,OAAO,GAAG,GAAG,SAAS;AAC5E,SAAO,KAAK,WAAW,WAAM,SAAI;AACjC,SAAO,KAAK,WAAW,MAAM,IAAI;AACjC,SAAO,KAAK,WAAW,MAAM,IAAI;AACjC,SAAO,KAAK,WAAW,MAAM,IAAI;AACjC,SAAO,KAAK,WAAW,KAAK,SAAS;AACrC,SAAO,KAAK,WAAW,KAAK,SAAS;AACrC,SAAO,KAAK,WAAW,KAAK,SAAS;AACrC,SAAO,KAAK,WAAW,SAAS,GAAG;AAEnC,MAAI,cAAc;AAChB,WAAO,KAAK,WAAW,SAAS,IAAI;AAAA,EACtC;AAEA,QAAM,QAAQ,KAAK,MAAM,QAAQ;AACjC,SAAO,KAAK,WAAW,UAAU,EAAE;AAEnC,QAAM,YAAwC,CAAC;AAC/C,MAAI,MAAM;AACV,MAAI,QAAQ;AACZ,MAAI,MAAM;AACV,QAAM,SAAS,eAAe,KAAK;AACnC,aAAW,SAAS,OAAO;AACzB,UAAM,WAAW,eAAe,QAAQ,MAAM,KAAK;AACnD,QAAI,CAAC,SAAU;AAEf,WAAO,SAAS;AAChB,WAAO,MAAM;AACb,QAAI,IAAI,SAAS,WAAW;AAC1B,gBAAU,KAAK,CAAC,IAAI,MAAM,OAAO,MAAM,GAAG,OAAO,GAAG,CAAC;AACrD,cAAQ;AACR,YAAM;AAAA,IACR;AAAA,EACF;AAEA,MAAI,KAAK;AACP,cAAU,KAAK,CAAC,IAAI,MAAM,OAAO,MAAM,GAAG,OAAO,KAAK,SAAS,CAAC,CAAC;AAAA,EACnE;AAEA,SAAO;AACT;","names":[]}
1
+ {"version":3,"sources":["../../../src/tokenize/basic/sentence.ts"],"sourcesContent":["// SPDX-FileCopyrightText: 2024 LiveKit, Inc.\n//\n// SPDX-License-Identifier: Apache-2.0\n\n/**\n * Split the text into sentences.\n */\nexport const splitSentences = (\n text: string,\n minLength = 20,\n retainFormat: boolean = false,\n): [string, number, number][] => {\n const alphabets = /([A-Za-z])/g;\n const prefixes = /(Mr|St|Mrs|Ms|Dr)[.]/g;\n const suffixes = /(Inc|Ltd|Jr|Sr|Co)/g;\n const starters =\n /(Mr|Mrs|Ms|Dr|Prof|Capt|Cpt|Lt|He\\s|She\\s|It\\s|They\\s|Their\\s|Our\\s|We\\s|But\\s|However\\s|That\\s|This\\s|Wherever)/g;\n const acronyms = /([A-Z][.][A-Z][.](?:[A-Z][.])?)/g;\n const websites = /(\\w+\\.)+(com|net|org|io|gov|edu|me)/g;\n const digits = /([0-9])/g;\n const dots = /\\.{2,}/g;\n\n if (retainFormat) {\n text = text.replaceAll('\\n', '<nel><stop>');\n } else {\n text = text.replaceAll('\\n', ' ');\n }\n\n text = text.replaceAll(prefixes, '$1<prd>');\n text = text.replace(websites, (match) => match.replaceAll('.', '<prd>'));\n text = text.replaceAll(new RegExp(`${digits.source}[.]${digits.source}`, 'g'), '$1<prd>$2');\n text = text.replaceAll(dots, (match) => '<prd>'.repeat(match.length));\n text = text.replaceAll('Ph.D.', 'Ph<prd>D<prd>');\n text = text.replaceAll(new RegExp(`\\\\s${alphabets.source}[.] `, 'g'), ' $1<prd> ');\n text = text.replaceAll(new RegExp(`${acronyms.source} ${starters.source}`, 'g'), '$1<stop> $2');\n text = text.replaceAll(\n new RegExp(`${alphabets.source}[.]${alphabets.source}[.]${alphabets.source}[.]`, 'g'),\n '$1<prd>$2<prd>$3<prd>',\n );\n text = text.replaceAll(\n new RegExp(`${alphabets.source}[.]${alphabets.source}[.]`, 'g'),\n '$1<prd>$2<prd>',\n );\n text = text.replaceAll(\n new RegExp(` ${suffixes.source}[.] 
${starters.source}`, 'g'),\n '$1<stop> $2',\n );\n text = text.replaceAll(new RegExp(` ${suffixes.source}[.]`, 'g'), '$1<prd>');\n text = text.replaceAll(new RegExp(` ${alphabets.source}[.]`, 'g'), '$1<prd>');\n text = text.replaceAll('.”', '”.');\n text = text.replaceAll('.\"', '\".');\n text = text.replaceAll('!\"', '\"!');\n text = text.replaceAll('?\"', '\"?');\n text = text.replace(/\\.(?=\\s|$)/g, '.<stop>');\n text = text.replaceAll('?', '?<stop>');\n text = text.replaceAll('!', '!<stop>');\n text = text.replaceAll('<prd>', '.');\n\n if (retainFormat) {\n text = text.replaceAll('<nel>', '\\n');\n }\n\n const split = text.split('<stop>');\n text = text.replaceAll('<stop>', '');\n\n const sentences: [string, number, number][] = [];\n let buf = '';\n let start = 0;\n let end = 0;\n const prePad = retainFormat ? '' : ' ';\n for (const match of split) {\n const sentence = retainFormat ? match : match.trim();\n if (!sentence) continue;\n\n buf += prePad + sentence;\n end += match.length;\n if (buf.length > minLength) {\n sentences.push([buf.slice(prePad.length), start, end]);\n start = end;\n buf = '';\n }\n }\n\n if (buf) {\n sentences.push([buf.slice(prePad.length), start, text.length - 1]);\n }\n\n return 
sentences;\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAOO,MAAM,iBAAiB,CAC5B,MACA,YAAY,IACZ,eAAwB,UACO;AAC/B,QAAM,YAAY;AAClB,QAAM,WAAW;AACjB,QAAM,WAAW;AACjB,QAAM,WACJ;AACF,QAAM,WAAW;AACjB,QAAM,WAAW;AACjB,QAAM,SAAS;AACf,QAAM,OAAO;AAEb,MAAI,cAAc;AAChB,WAAO,KAAK,WAAW,MAAM,aAAa;AAAA,EAC5C,OAAO;AACL,WAAO,KAAK,WAAW,MAAM,GAAG;AAAA,EAClC;AAEA,SAAO,KAAK,WAAW,UAAU,SAAS;AAC1C,SAAO,KAAK,QAAQ,UAAU,CAAC,UAAU,MAAM,WAAW,KAAK,OAAO,CAAC;AACvE,SAAO,KAAK,WAAW,IAAI,OAAO,GAAG,OAAO,MAAM,MAAM,OAAO,MAAM,IAAI,GAAG,GAAG,WAAW;AAC1F,SAAO,KAAK,WAAW,MAAM,CAAC,UAAU,QAAQ,OAAO,MAAM,MAAM,CAAC;AACpE,SAAO,KAAK,WAAW,SAAS,eAAe;AAC/C,SAAO,KAAK,WAAW,IAAI,OAAO,MAAM,UAAU,MAAM,QAAQ,GAAG,GAAG,WAAW;AACjF,SAAO,KAAK,WAAW,IAAI,OAAO,GAAG,SAAS,MAAM,IAAI,SAAS,MAAM,IAAI,GAAG,GAAG,aAAa;AAC9F,SAAO,KAAK;AAAA,IACV,IAAI,OAAO,GAAG,UAAU,MAAM,MAAM,UAAU,MAAM,MAAM,UAAU,MAAM,OAAO,GAAG;AAAA,IACpF;AAAA,EACF;AACA,SAAO,KAAK;AAAA,IACV,IAAI,OAAO,GAAG,UAAU,MAAM,MAAM,UAAU,MAAM,OAAO,GAAG;AAAA,IAC9D;AAAA,EACF;AACA,SAAO,KAAK;AAAA,IACV,IAAI,OAAO,IAAI,SAAS,MAAM,OAAO,SAAS,MAAM,IAAI,GAAG;AAAA,IAC3D;AAAA,EACF;AACA,SAAO,KAAK,WAAW,IAAI,OAAO,IAAI,SAAS,MAAM,OAAO,GAAG,GAAG,SAAS;AAC3E,SAAO,KAAK,WAAW,IAAI,OAAO,IAAI,UAAU,MAAM,OAAO,GAAG,GAAG,SAAS;AAC5E,SAAO,KAAK,WAAW,WAAM,SAAI;AACjC,SAAO,KAAK,WAAW,MAAM,IAAI;AACjC,SAAO,KAAK,WAAW,MAAM,IAAI;AACjC,SAAO,KAAK,WAAW,MAAM,IAAI;AACjC,SAAO,KAAK,QAAQ,eAAe,SAAS;AAC5C,SAAO,KAAK,WAAW,KAAK,SAAS;AACrC,SAAO,KAAK,WAAW,KAAK,SAAS;AACrC,SAAO,KAAK,WAAW,SAAS,GAAG;AAEnC,MAAI,cAAc;AAChB,WAAO,KAAK,WAAW,SAAS,IAAI;AAAA,EACtC;AAEA,QAAM,QAAQ,KAAK,MAAM,QAAQ;AACjC,SAAO,KAAK,WAAW,UAAU,EAAE;AAEnC,QAAM,YAAwC,CAAC;AAC/C,MAAI,MAAM;AACV,MAAI,QAAQ;AACZ,MAAI,MAAM;AACV,QAAM,SAAS,eAAe,KAAK;AACnC,aAAW,SAAS,OAAO;AACzB,UAAM,WAAW,eAAe,QAAQ,MAAM,KAAK;AACnD,QAAI,CAAC,SAAU;AAEf,WAAO,SAAS;AAChB,WAAO,MAAM;AACb,QAAI,IAAI,SAAS,WAAW;AAC1B,gBAAU,KAAK,CAAC,IAAI,MAAM,OAAO,MAAM,GAAG,OAAO,GAAG,CAAC;AACrD,cAAQ;AACR,YAAM;AAAA,IACR;AAAA,EACF;AAEA,MAAI,KAAK;AACP,cAAU,KAAK,CAAC,IAAI,MAAM,OAAO,MAAM,GAAG,OAAO,KAAK,SAAS,CAAC,CAAC;AAAA,EACnE;AAEA,SAAO
;AACT;","names":[]}
@@ -4,7 +4,7 @@ const splitSentences = (text, minLength = 20, retainFormat = false) => {
4
4
  const suffixes = /(Inc|Ltd|Jr|Sr|Co)/g;
5
5
  const starters = /(Mr|Mrs|Ms|Dr|Prof|Capt|Cpt|Lt|He\s|She\s|It\s|They\s|Their\s|Our\s|We\s|But\s|However\s|That\s|This\s|Wherever)/g;
6
6
  const acronyms = /([A-Z][.][A-Z][.](?:[A-Z][.])?)/g;
7
- const websites = /[.](com|net|org|io|gov|edu|me)/g;
7
+ const websites = /(\w+\.)+(com|net|org|io|gov|edu|me)/g;
8
8
  const digits = /([0-9])/g;
9
9
  const dots = /\.{2,}/g;
10
10
  if (retainFormat) {
@@ -13,7 +13,7 @@ const splitSentences = (text, minLength = 20, retainFormat = false) => {
13
13
  text = text.replaceAll("\n", " ");
14
14
  }
15
15
  text = text.replaceAll(prefixes, "$1<prd>");
16
- text = text.replaceAll(websites, "<prd>$2");
16
+ text = text.replace(websites, (match) => match.replaceAll(".", "<prd>"));
17
17
  text = text.replaceAll(new RegExp(`${digits.source}[.]${digits.source}`, "g"), "$1<prd>$2");
18
18
  text = text.replaceAll(dots, (match) => "<prd>".repeat(match.length));
19
19
  text = text.replaceAll("Ph.D.", "Ph<prd>D<prd>");
@@ -37,7 +37,7 @@ const splitSentences = (text, minLength = 20, retainFormat = false) => {
37
37
  text = text.replaceAll('."', '".');
38
38
  text = text.replaceAll('!"', '"!');
39
39
  text = text.replaceAll('?"', '"?');
40
- text = text.replaceAll(".", ".<stop>");
40
+ text = text.replace(/\.(?=\s|$)/g, ".<stop>");
41
41
  text = text.replaceAll("?", "?<stop>");
42
42
  text = text.replaceAll("!", "!<stop>");
43
43
  text = text.replaceAll("<prd>", ".");
@@ -1 +1 @@
1
- {"version":3,"sources":["../../../src/tokenize/basic/sentence.ts"],"sourcesContent":["// SPDX-FileCopyrightText: 2024 LiveKit, Inc.\n//\n// SPDX-License-Identifier: Apache-2.0\n\n/**\n * Split the text into sentences.\n */\nexport const splitSentences = (\n text: string,\n minLength = 20,\n retainFormat: boolean = false,\n): [string, number, number][] => {\n const alphabets = /([A-Za-z])/g;\n const prefixes = /(Mr|St|Mrs|Ms|Dr)[.]/g;\n const suffixes = /(Inc|Ltd|Jr|Sr|Co)/g;\n const starters =\n /(Mr|Mrs|Ms|Dr|Prof|Capt|Cpt|Lt|He\\s|She\\s|It\\s|They\\s|Their\\s|Our\\s|We\\s|But\\s|However\\s|That\\s|This\\s|Wherever)/g;\n const acronyms = /([A-Z][.][A-Z][.](?:[A-Z][.])?)/g;\n const websites = /[.](com|net|org|io|gov|edu|me)/g;\n const digits = /([0-9])/g;\n const dots = /\\.{2,}/g;\n\n if (retainFormat) {\n text = text.replaceAll('\\n', '<nel><stop>');\n } else {\n text = text.replaceAll('\\n', ' ');\n }\n\n text = text.replaceAll(prefixes, '$1<prd>');\n text = text.replaceAll(websites, '<prd>$2');\n text = text.replaceAll(new RegExp(`${digits.source}[.]${digits.source}`, 'g'), '$1<prd>$2');\n text = text.replaceAll(dots, (match) => '<prd>'.repeat(match.length));\n text = text.replaceAll('Ph.D.', 'Ph<prd>D<prd>');\n text = text.replaceAll(new RegExp(`\\\\s${alphabets.source}[.] `, 'g'), ' $1<prd> ');\n text = text.replaceAll(new RegExp(`${acronyms.source} ${starters.source}`, 'g'), '$1<stop> $2');\n text = text.replaceAll(\n new RegExp(`${alphabets.source}[.]${alphabets.source}[.]${alphabets.source}[.]`, 'g'),\n '$1<prd>$2<prd>$3<prd>',\n );\n text = text.replaceAll(\n new RegExp(`${alphabets.source}[.]${alphabets.source}[.]`, 'g'),\n '$1<prd>$2<prd>',\n );\n text = text.replaceAll(\n new RegExp(` ${suffixes.source}[.] 
${starters.source}`, 'g'),\n '$1<stop> $2',\n );\n text = text.replaceAll(new RegExp(` ${suffixes.source}[.]`, 'g'), '$1<prd>');\n text = text.replaceAll(new RegExp(` ${alphabets.source}[.]`, 'g'), '$1<prd>');\n text = text.replaceAll('.”', '”.');\n text = text.replaceAll('.\"', '\".');\n text = text.replaceAll('!\"', '\"!');\n text = text.replaceAll('?\"', '\"?');\n text = text.replaceAll('.', '.<stop>');\n text = text.replaceAll('?', '?<stop>');\n text = text.replaceAll('!', '!<stop>');\n text = text.replaceAll('<prd>', '.');\n\n if (retainFormat) {\n text = text.replaceAll('<nel>', '\\n');\n }\n\n const split = text.split('<stop>');\n text = text.replaceAll('<stop>', '');\n\n const sentences: [string, number, number][] = [];\n let buf = '';\n let start = 0;\n let end = 0;\n const prePad = retainFormat ? '' : ' ';\n for (const match of split) {\n const sentence = retainFormat ? match : match.trim();\n if (!sentence) continue;\n\n buf += prePad + sentence;\n end += match.length;\n if (buf.length > minLength) {\n sentences.push([buf.slice(prePad.length), start, end]);\n start = end;\n buf = '';\n }\n }\n\n if (buf) {\n sentences.push([buf.slice(prePad.length), start, text.length - 1]);\n }\n\n return 
sentences;\n};\n"],"mappings":"AAOO,MAAM,iBAAiB,CAC5B,MACA,YAAY,IACZ,eAAwB,UACO;AAC/B,QAAM,YAAY;AAClB,QAAM,WAAW;AACjB,QAAM,WAAW;AACjB,QAAM,WACJ;AACF,QAAM,WAAW;AACjB,QAAM,WAAW;AACjB,QAAM,SAAS;AACf,QAAM,OAAO;AAEb,MAAI,cAAc;AAChB,WAAO,KAAK,WAAW,MAAM,aAAa;AAAA,EAC5C,OAAO;AACL,WAAO,KAAK,WAAW,MAAM,GAAG;AAAA,EAClC;AAEA,SAAO,KAAK,WAAW,UAAU,SAAS;AAC1C,SAAO,KAAK,WAAW,UAAU,SAAS;AAC1C,SAAO,KAAK,WAAW,IAAI,OAAO,GAAG,OAAO,MAAM,MAAM,OAAO,MAAM,IAAI,GAAG,GAAG,WAAW;AAC1F,SAAO,KAAK,WAAW,MAAM,CAAC,UAAU,QAAQ,OAAO,MAAM,MAAM,CAAC;AACpE,SAAO,KAAK,WAAW,SAAS,eAAe;AAC/C,SAAO,KAAK,WAAW,IAAI,OAAO,MAAM,UAAU,MAAM,QAAQ,GAAG,GAAG,WAAW;AACjF,SAAO,KAAK,WAAW,IAAI,OAAO,GAAG,SAAS,MAAM,IAAI,SAAS,MAAM,IAAI,GAAG,GAAG,aAAa;AAC9F,SAAO,KAAK;AAAA,IACV,IAAI,OAAO,GAAG,UAAU,MAAM,MAAM,UAAU,MAAM,MAAM,UAAU,MAAM,OAAO,GAAG;AAAA,IACpF;AAAA,EACF;AACA,SAAO,KAAK;AAAA,IACV,IAAI,OAAO,GAAG,UAAU,MAAM,MAAM,UAAU,MAAM,OAAO,GAAG;AAAA,IAC9D;AAAA,EACF;AACA,SAAO,KAAK;AAAA,IACV,IAAI,OAAO,IAAI,SAAS,MAAM,OAAO,SAAS,MAAM,IAAI,GAAG;AAAA,IAC3D;AAAA,EACF;AACA,SAAO,KAAK,WAAW,IAAI,OAAO,IAAI,SAAS,MAAM,OAAO,GAAG,GAAG,SAAS;AAC3E,SAAO,KAAK,WAAW,IAAI,OAAO,IAAI,UAAU,MAAM,OAAO,GAAG,GAAG,SAAS;AAC5E,SAAO,KAAK,WAAW,WAAM,SAAI;AACjC,SAAO,KAAK,WAAW,MAAM,IAAI;AACjC,SAAO,KAAK,WAAW,MAAM,IAAI;AACjC,SAAO,KAAK,WAAW,MAAM,IAAI;AACjC,SAAO,KAAK,WAAW,KAAK,SAAS;AACrC,SAAO,KAAK,WAAW,KAAK,SAAS;AACrC,SAAO,KAAK,WAAW,KAAK,SAAS;AACrC,SAAO,KAAK,WAAW,SAAS,GAAG;AAEnC,MAAI,cAAc;AAChB,WAAO,KAAK,WAAW,SAAS,IAAI;AAAA,EACtC;AAEA,QAAM,QAAQ,KAAK,MAAM,QAAQ;AACjC,SAAO,KAAK,WAAW,UAAU,EAAE;AAEnC,QAAM,YAAwC,CAAC;AAC/C,MAAI,MAAM;AACV,MAAI,QAAQ;AACZ,MAAI,MAAM;AACV,QAAM,SAAS,eAAe,KAAK;AACnC,aAAW,SAAS,OAAO;AACzB,UAAM,WAAW,eAAe,QAAQ,MAAM,KAAK;AACnD,QAAI,CAAC,SAAU;AAEf,WAAO,SAAS;AAChB,WAAO,MAAM;AACb,QAAI,IAAI,SAAS,WAAW;AAC1B,gBAAU,KAAK,CAAC,IAAI,MAAM,OAAO,MAAM,GAAG,OAAO,GAAG,CAAC;AACrD,cAAQ;AACR,YAAM;AAAA,IACR;AAAA,EACF;AAEA,MAAI,KAAK;AACP,cAAU,KAAK,CAAC,IAAI,MAAM,OAAO,MAAM,GAAG,OAAO,KAAK,SAAS,CAAC,CAAC;AAAA,EACnE;AAEA,SAAO;AACT;","names":[]}
1
+ {"version":3,"sources":["../../../src/tokenize/basic/sentence.ts"],"sourcesContent":["// SPDX-FileCopyrightText: 2024 LiveKit, Inc.\n//\n// SPDX-License-Identifier: Apache-2.0\n\n/**\n * Split the text into sentences.\n */\nexport const splitSentences = (\n text: string,\n minLength = 20,\n retainFormat: boolean = false,\n): [string, number, number][] => {\n const alphabets = /([A-Za-z])/g;\n const prefixes = /(Mr|St|Mrs|Ms|Dr)[.]/g;\n const suffixes = /(Inc|Ltd|Jr|Sr|Co)/g;\n const starters =\n /(Mr|Mrs|Ms|Dr|Prof|Capt|Cpt|Lt|He\\s|She\\s|It\\s|They\\s|Their\\s|Our\\s|We\\s|But\\s|However\\s|That\\s|This\\s|Wherever)/g;\n const acronyms = /([A-Z][.][A-Z][.](?:[A-Z][.])?)/g;\n const websites = /(\\w+\\.)+(com|net|org|io|gov|edu|me)/g;\n const digits = /([0-9])/g;\n const dots = /\\.{2,}/g;\n\n if (retainFormat) {\n text = text.replaceAll('\\n', '<nel><stop>');\n } else {\n text = text.replaceAll('\\n', ' ');\n }\n\n text = text.replaceAll(prefixes, '$1<prd>');\n text = text.replace(websites, (match) => match.replaceAll('.', '<prd>'));\n text = text.replaceAll(new RegExp(`${digits.source}[.]${digits.source}`, 'g'), '$1<prd>$2');\n text = text.replaceAll(dots, (match) => '<prd>'.repeat(match.length));\n text = text.replaceAll('Ph.D.', 'Ph<prd>D<prd>');\n text = text.replaceAll(new RegExp(`\\\\s${alphabets.source}[.] `, 'g'), ' $1<prd> ');\n text = text.replaceAll(new RegExp(`${acronyms.source} ${starters.source}`, 'g'), '$1<stop> $2');\n text = text.replaceAll(\n new RegExp(`${alphabets.source}[.]${alphabets.source}[.]${alphabets.source}[.]`, 'g'),\n '$1<prd>$2<prd>$3<prd>',\n );\n text = text.replaceAll(\n new RegExp(`${alphabets.source}[.]${alphabets.source}[.]`, 'g'),\n '$1<prd>$2<prd>',\n );\n text = text.replaceAll(\n new RegExp(` ${suffixes.source}[.] 
${starters.source}`, 'g'),\n '$1<stop> $2',\n );\n text = text.replaceAll(new RegExp(` ${suffixes.source}[.]`, 'g'), '$1<prd>');\n text = text.replaceAll(new RegExp(` ${alphabets.source}[.]`, 'g'), '$1<prd>');\n text = text.replaceAll('.”', '”.');\n text = text.replaceAll('.\"', '\".');\n text = text.replaceAll('!\"', '\"!');\n text = text.replaceAll('?\"', '\"?');\n text = text.replace(/\\.(?=\\s|$)/g, '.<stop>');\n text = text.replaceAll('?', '?<stop>');\n text = text.replaceAll('!', '!<stop>');\n text = text.replaceAll('<prd>', '.');\n\n if (retainFormat) {\n text = text.replaceAll('<nel>', '\\n');\n }\n\n const split = text.split('<stop>');\n text = text.replaceAll('<stop>', '');\n\n const sentences: [string, number, number][] = [];\n let buf = '';\n let start = 0;\n let end = 0;\n const prePad = retainFormat ? '' : ' ';\n for (const match of split) {\n const sentence = retainFormat ? match : match.trim();\n if (!sentence) continue;\n\n buf += prePad + sentence;\n end += match.length;\n if (buf.length > minLength) {\n sentences.push([buf.slice(prePad.length), start, end]);\n start = end;\n buf = '';\n }\n }\n\n if (buf) {\n sentences.push([buf.slice(prePad.length), start, text.length - 1]);\n }\n\n return 
sentences;\n};\n"],"mappings":"AAOO,MAAM,iBAAiB,CAC5B,MACA,YAAY,IACZ,eAAwB,UACO;AAC/B,QAAM,YAAY;AAClB,QAAM,WAAW;AACjB,QAAM,WAAW;AACjB,QAAM,WACJ;AACF,QAAM,WAAW;AACjB,QAAM,WAAW;AACjB,QAAM,SAAS;AACf,QAAM,OAAO;AAEb,MAAI,cAAc;AAChB,WAAO,KAAK,WAAW,MAAM,aAAa;AAAA,EAC5C,OAAO;AACL,WAAO,KAAK,WAAW,MAAM,GAAG;AAAA,EAClC;AAEA,SAAO,KAAK,WAAW,UAAU,SAAS;AAC1C,SAAO,KAAK,QAAQ,UAAU,CAAC,UAAU,MAAM,WAAW,KAAK,OAAO,CAAC;AACvE,SAAO,KAAK,WAAW,IAAI,OAAO,GAAG,OAAO,MAAM,MAAM,OAAO,MAAM,IAAI,GAAG,GAAG,WAAW;AAC1F,SAAO,KAAK,WAAW,MAAM,CAAC,UAAU,QAAQ,OAAO,MAAM,MAAM,CAAC;AACpE,SAAO,KAAK,WAAW,SAAS,eAAe;AAC/C,SAAO,KAAK,WAAW,IAAI,OAAO,MAAM,UAAU,MAAM,QAAQ,GAAG,GAAG,WAAW;AACjF,SAAO,KAAK,WAAW,IAAI,OAAO,GAAG,SAAS,MAAM,IAAI,SAAS,MAAM,IAAI,GAAG,GAAG,aAAa;AAC9F,SAAO,KAAK;AAAA,IACV,IAAI,OAAO,GAAG,UAAU,MAAM,MAAM,UAAU,MAAM,MAAM,UAAU,MAAM,OAAO,GAAG;AAAA,IACpF;AAAA,EACF;AACA,SAAO,KAAK;AAAA,IACV,IAAI,OAAO,GAAG,UAAU,MAAM,MAAM,UAAU,MAAM,OAAO,GAAG;AAAA,IAC9D;AAAA,EACF;AACA,SAAO,KAAK;AAAA,IACV,IAAI,OAAO,IAAI,SAAS,MAAM,OAAO,SAAS,MAAM,IAAI,GAAG;AAAA,IAC3D;AAAA,EACF;AACA,SAAO,KAAK,WAAW,IAAI,OAAO,IAAI,SAAS,MAAM,OAAO,GAAG,GAAG,SAAS;AAC3E,SAAO,KAAK,WAAW,IAAI,OAAO,IAAI,UAAU,MAAM,OAAO,GAAG,GAAG,SAAS;AAC5E,SAAO,KAAK,WAAW,WAAM,SAAI;AACjC,SAAO,KAAK,WAAW,MAAM,IAAI;AACjC,SAAO,KAAK,WAAW,MAAM,IAAI;AACjC,SAAO,KAAK,WAAW,MAAM,IAAI;AACjC,SAAO,KAAK,QAAQ,eAAe,SAAS;AAC5C,SAAO,KAAK,WAAW,KAAK,SAAS;AACrC,SAAO,KAAK,WAAW,KAAK,SAAS;AACrC,SAAO,KAAK,WAAW,SAAS,GAAG;AAEnC,MAAI,cAAc;AAChB,WAAO,KAAK,WAAW,SAAS,IAAI;AAAA,EACtC;AAEA,QAAM,QAAQ,KAAK,MAAM,QAAQ;AACjC,SAAO,KAAK,WAAW,UAAU,EAAE;AAEnC,QAAM,YAAwC,CAAC;AAC/C,MAAI,MAAM;AACV,MAAI,QAAQ;AACZ,MAAI,MAAM;AACV,QAAM,SAAS,eAAe,KAAK;AACnC,aAAW,SAAS,OAAO;AACzB,UAAM,WAAW,eAAe,QAAQ,MAAM,KAAK;AACnD,QAAI,CAAC,SAAU;AAEf,WAAO,SAAS;AAChB,WAAO,MAAM;AACb,QAAI,IAAI,SAAS,WAAW;AAC1B,gBAAU,KAAK,CAAC,IAAI,MAAM,OAAO,MAAM,GAAG,OAAO,GAAG,CAAC;AACrD,cAAQ;AACR,YAAM;AAAA,IACR;AAAA,EACF;AAEA,MAAI,KAAK;AACP,cAAU,KAAK,CAAC,IAAI,MAAM,OAAO,MAAM,GAAG,OAAO,KAAK,SAAS,CAAC,CAAC;AAAA,EACnE;AAEA,SAAO;AACT;","names":[]}
@@ -2,13 +2,15 @@
2
2
  var import_vitest = require("vitest");
3
3
  var import_basic = require("./basic/index.cjs");
4
4
  var import_paragraph = require("./basic/paragraph.cjs");
5
- const TEXT = "Hi! LiveKit is a platform for live audio and video applications and services. R.T.C stands for Real-Time Communication... again R.T.C. Mr. Theo is testing the sentence tokenizer. This is a test. Another test. A short sentence. A longer sentence that is longer than the previous sentence. f(x) = x * 2.54 + 42. Hey! Hi! Hello! ";
5
+ const TEXT = "Hi! LiveKit is a platform for live audio and video applications and services. R.T.C stands for Real-Time Communication... again R.T.C. Mr. Theo is testing the sentence tokenizer. This is a test. Another test. A short sentence. A longer sentence that is longer than the previous sentence. Find additional resources on livekit.com. Find additional resources on docs.livekit.com. f(x) = x * 2.54 + 42. Hey! Hi! Hello! ";
6
6
  const EXPECTED_MIN_20 = [
7
7
  "Hi! LiveKit is a platform for live audio and video applications and services.",
8
8
  "R.T.C stands for Real-Time Communication... again R.T.C.",
9
9
  "Mr. Theo is testing the sentence tokenizer.",
10
10
  "This is a test. Another test.",
11
11
  "A short sentence. A longer sentence that is longer than the previous sentence.",
12
+ "Find additional resources on livekit.com.",
13
+ "Find additional resources on docs.livekit.com.",
12
14
  "f(x) = x * 2.54 + 42.",
13
15
  "Hey! Hi! Hello!"
14
16
  ];
@@ -1 +1 @@
1
- {"version":3,"sources":["../../src/tokenize/tokenizer.test.ts"],"sourcesContent":["// SPDX-FileCopyrightText: 2024 LiveKit, Inc.\n//\n// SPDX-License-Identifier: Apache-2.0\nimport { describe, expect, it } from 'vitest';\nimport { SentenceTokenizer, WordTokenizer, hyphenateWord } from './basic/index.js';\nimport { splitParagraphs } from './basic/paragraph.js';\n\nconst TEXT =\n 'Hi! ' +\n 'LiveKit is a platform for live audio and video applications and services. ' +\n 'R.T.C stands for Real-Time Communication... again R.T.C. ' +\n 'Mr. Theo is testing the sentence tokenizer. ' +\n 'This is a test. Another test. ' +\n 'A short sentence. ' +\n 'A longer sentence that is longer than the previous sentence. ' +\n 'f(x) = x * 2.54 + 42. ' +\n 'Hey! Hi! Hello! ';\n\nconst EXPECTED_MIN_20 = [\n 'Hi! LiveKit is a platform for live audio and video applications and services.',\n 'R.T.C stands for Real-Time Communication... again R.T.C.',\n 'Mr. Theo is testing the sentence tokenizer.',\n 'This is a test. Another test.',\n 'A short sentence. A longer sentence that is longer than the previous sentence.',\n 'f(x) = x * 2.54 + 42.',\n 'Hey! Hi! Hello!',\n];\n\nconst WORDS_TEXT = 'This is a test. Blabla another test! 
multiple consecutive spaces: done';\nconst WORDS_EXPECTED = [\n 'This',\n 'is',\n 'a',\n 'test',\n 'Blabla',\n 'another',\n 'test',\n 'multiple',\n 'consecutive',\n 'spaces',\n 'done',\n];\n\nconst WORDS_PUNCT_TEXT =\n 'This is <phoneme alphabet=\"cmu-arpabet\" ph=\"AE K CH UW AH L IY\">actually</phoneme> tricky to handle.';\nconst WORDS_PUNCT_EXPECTED = [\n 'This',\n 'is',\n '<phoneme',\n 'alphabet=\"cmu-arpabet\"',\n 'ph=\"AE',\n 'K',\n 'CH',\n 'UW',\n 'AH',\n 'L',\n 'IY\">actually</phoneme>',\n 'tricky',\n 'to',\n 'handle.',\n];\n\nconst HYPHENATOR_TEXT = ['Segment', 'expected', 'communication', 'window', 'welcome', 'bedroom'];\nconst HYPHENATOR_EXPECTED = [\n ['Seg', 'ment'],\n ['ex', 'pect', 'ed'],\n ['com', 'mu', 'ni', 'ca', 'tion'],\n ['win', 'dow'],\n ['wel', 'come'],\n ['bed', 'room'],\n];\n\nconst PARAGRAPH_TEST_CASES: [string, [string, number, number][]][] = [\n ['Single paragraph.', [['Single paragraph.', 0, 17]]],\n [\n 'Paragraph 1.\\n\\nParagraph 2.',\n [\n ['Paragraph 1.', 0, 12],\n ['Paragraph 2.', 14, 26],\n ],\n ],\n [\n 'Para 1.\\n\\nPara 2.\\n\\nPara 3.',\n [\n ['Para 1.', 0, 7],\n ['Para 2.', 9, 16],\n ['Para 3.', 18, 25],\n ],\n ],\n ['\\n\\nParagraph with leading newlines.', [['Paragraph with leading newlines.', 2, 34]]],\n ['Paragraph with trailing newlines.\\n\\n', [['Paragraph with trailing newlines.', 0, 33]]],\n [\n '\\n\\n Paragraph with leading and trailing spaces. 
\\n\\n',\n [['Paragraph with leading and trailing spaces.', 4, 47]],\n ],\n [\n 'Para 1.\\n\\n\\n\\nPara 2.', // Multiple newlines between paragraphs\n [\n ['Para 1.', 0, 7],\n ['Para 2.', 11, 18],\n ],\n ],\n [\n 'Para 1.\\n \\n \\nPara 2.', // Newlines with spaces between paragraphs\n [\n ['Para 1.', 0, 7],\n ['Para 2.', 12, 19],\n ],\n ],\n [\n '', // Empty string\n [],\n ],\n [\n '\\n\\n\\n', // Only newlines\n [],\n ],\n [\n 'Line 1\\nLine 2\\nLine 3', // Single paragraph with newlines\n [['Line 1\\nLine 2\\nLine 3', 0, 20]],\n ],\n];\n\ndescribe('tokenizer', () => {\n describe('SentenceTokenizer', () => {\n const tokenizer = new SentenceTokenizer();\n\n it('should tokenize sentences correctly', () => {\n expect(tokenizer.tokenize(TEXT).every((x, i) => EXPECTED_MIN_20[i] === x)).toBeTruthy();\n });\n\n it('should stream tokenize sentences correctly', async () => {\n const pattern = [1, 2, 4];\n let text = TEXT;\n const chunks = [];\n const patternIter = Array(Math.ceil(text.length / pattern.reduce((sum, num) => sum + num, 0)))\n .fill(pattern)\n .flat()\n [Symbol.iterator]();\n\n for (const size of patternIter) {\n if (!text) break;\n chunks.push(text.slice(undefined, size));\n text = text.slice(size);\n }\n const stream = tokenizer.stream();\n for (const chunk of chunks) {\n stream.pushText(chunk);\n }\n stream.endInput();\n stream.close();\n\n for (const x of EXPECTED_MIN_20) {\n await stream.next().then((value) => {\n if (value.value) {\n expect(value.value.token).toStrictEqual(x);\n }\n });\n }\n });\n });\n describe('WordTokenizer', () => {\n const tokenizer = new WordTokenizer();\n\n it('should tokenize words correctly', () => {\n expect(tokenizer.tokenize(WORDS_TEXT).every((x, i) => WORDS_EXPECTED[i] === x)).toBeTruthy();\n });\n\n it('should stream tokenize words correctly', async () => {\n const pattern = [1, 2, 4];\n let text = WORDS_TEXT;\n const chunks = [];\n const patternIter = Array(Math.ceil(text.length / pattern.reduce((sum, num) => sum + num, 
0)))\n .fill(pattern)\n .flat()\n [Symbol.iterator]();\n\n for (const size of patternIter) {\n if (!text) break;\n chunks.push(text.slice(undefined, size));\n text = text.slice(size);\n }\n const stream = tokenizer.stream();\n for (const chunk of chunks) {\n stream.pushText(chunk);\n }\n stream.endInput();\n stream.close();\n\n for (const x of WORDS_EXPECTED) {\n await stream.next().then((value) => {\n if (value.value) {\n expect(value.value.token).toStrictEqual(x);\n }\n });\n }\n });\n\n describe('punctuation handling', () => {\n const tokenizerPunct = new WordTokenizer(false);\n\n it('should tokenize words correctly', () => {\n expect(\n tokenizerPunct.tokenize(WORDS_PUNCT_TEXT).every((x, i) => WORDS_PUNCT_EXPECTED[i] === x),\n ).toBeTruthy();\n });\n\n it('should stream tokenize words correctly', async () => {\n const pattern = [1, 2, 4];\n let text = WORDS_PUNCT_TEXT;\n const chunks = [];\n const patternIter = Array(\n Math.ceil(text.length / pattern.reduce((sum, num) => sum + num, 0)),\n )\n .fill(pattern)\n .flat()\n [Symbol.iterator]();\n\n for (const size of patternIter) {\n if (!text) break;\n chunks.push(text.slice(undefined, size));\n text = text.slice(size);\n }\n const stream = tokenizerPunct.stream();\n for (const chunk of chunks) {\n stream.pushText(chunk);\n }\n stream.endInput();\n stream.close();\n\n for (const x of WORDS_PUNCT_EXPECTED) {\n await stream.next().then((value) => {\n if (value.value) {\n expect(value.value.token).toStrictEqual(x);\n }\n });\n }\n });\n });\n });\n describe('hyphenateWord', () => {\n it('should hyphenate correctly', () => {\n HYPHENATOR_TEXT.forEach((x, i) => {\n expect(hyphenateWord(x)).toStrictEqual(HYPHENATOR_EXPECTED[i]);\n });\n });\n });\n describe('splitParagraphs', () => {\n it('should tokenize paragraphs correctly', () => {\n PARAGRAPH_TEST_CASES.forEach(([a, b]) => {\n expect(splitParagraphs(a)).toStrictEqual(b);\n });\n });\n 
});\n});\n"],"mappings":";AAGA,oBAAqC;AACrC,mBAAgE;AAChE,uBAAgC;AAEhC,MAAM,OACJ;AAUF,MAAM,kBAAkB;AAAA,EACtB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,MAAM,aAAa;AACnB,MAAM,iBAAiB;AAAA,EACrB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,MAAM,mBACJ;AACF,MAAM,uBAAuB;AAAA,EAC3B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,MAAM,kBAAkB,CAAC,WAAW,YAAY,iBAAiB,UAAU,WAAW,SAAS;AAC/F,MAAM,sBAAsB;AAAA,EAC1B,CAAC,OAAO,MAAM;AAAA,EACd,CAAC,MAAM,QAAQ,IAAI;AAAA,EACnB,CAAC,OAAO,MAAM,MAAM,MAAM,MAAM;AAAA,EAChC,CAAC,OAAO,KAAK;AAAA,EACb,CAAC,OAAO,MAAM;AAAA,EACd,CAAC,OAAO,MAAM;AAChB;AAEA,MAAM,uBAA+D;AAAA,EACnE,CAAC,qBAAqB,CAAC,CAAC,qBAAqB,GAAG,EAAE,CAAC,CAAC;AAAA,EACpD;AAAA,IACE;AAAA,IACA;AAAA,MACE,CAAC,gBAAgB,GAAG,EAAE;AAAA,MACtB,CAAC,gBAAgB,IAAI,EAAE;AAAA,IACzB;AAAA,EACF;AAAA,EACA;AAAA,IACE;AAAA,IACA;AAAA,MACE,CAAC,WAAW,GAAG,CAAC;AAAA,MAChB,CAAC,WAAW,GAAG,EAAE;AAAA,MACjB,CAAC,WAAW,IAAI,EAAE;AAAA,IACpB;AAAA,EACF;AAAA,EACA,CAAC,wCAAwC,CAAC,CAAC,oCAAoC,GAAG,EAAE,CAAC,CAAC;AAAA,EACtF,CAAC,yCAAyC,CAAC,CAAC,qCAAqC,GAAG,EAAE,CAAC,CAAC;AAAA,EACxF;AAAA,IACE;AAAA,IACA,CAAC,CAAC,+CAA+C,GAAG,EAAE,CAAC;AAAA,EACzD;AAAA,EACA;AAAA,IACE;AAAA;AAAA,IACA;AAAA,MACE,CAAC,WAAW,GAAG,CAAC;AAAA,MAChB,CAAC,WAAW,IAAI,EAAE;AAAA,IACpB;AAAA,EACF;AAAA,EACA;AAAA,IACE;AAAA;AAAA,IACA;AAAA,MACE,CAAC,WAAW,GAAG,CAAC;AAAA,MAChB,CAAC,WAAW,IAAI,EAAE;AAAA,IACpB;AAAA,EACF;AAAA,EACA;AAAA,IACE;AAAA;AAAA,IACA,CAAC;AAAA,EACH;AAAA,EACA;AAAA,IACE;AAAA;AAAA,IACA,CAAC;AAAA,EACH;AAAA,EACA;AAAA,IACE;AAAA;AAAA,IACA,CAAC,CAAC,0BAA0B,GAAG,EAAE,CAAC;AAAA,EACpC;AACF;AAAA,IAEA,wBAAS,aAAa,MAAM;AAC1B,8BAAS,qBAAqB,MAAM;AAClC,UAAM,YAAY,IAAI,+BAAkB;AAExC,0BAAG,uCAAuC,MAAM;AAC9C,gCAAO,UAAU,SAAS,IAAI,EAAE,MAAM,CAAC,GAAG,MAAM,gBAAgB,CAAC,MAAM,CAAC,CAAC,EAAE,WAAW;AAAA,IACxF,CAAC;AAED,0BAAG,8CAA8C,YAAY;AAC3D,YAAM,UAAU,CAAC,GAAG,GAAG,CAAC;AACxB,UAAI,OAAO;AACX,YAAM,SAAS,CAAC
;AAChB,YAAM,cAAc,MAAM,KAAK,KAAK,KAAK,SAAS,QAAQ,OAAO,CAAC,KAAK,QAAQ,MAAM,KAAK,CAAC,CAAC,CAAC,EAC1F,KAAK,OAAO,EACZ,KAAK,EACL,OAAO,QAAQ,EAAE;AAEpB,iBAAW,QAAQ,aAAa;AAC9B,YAAI,CAAC,KAAM;AACX,eAAO,KAAK,KAAK,MAAM,QAAW,IAAI,CAAC;AACvC,eAAO,KAAK,MAAM,IAAI;AAAA,MACxB;AACA,YAAM,SAAS,UAAU,OAAO;AAChC,iBAAW,SAAS,QAAQ;AAC1B,eAAO,SAAS,KAAK;AAAA,MACvB;AACA,aAAO,SAAS;AAChB,aAAO,MAAM;AAEb,iBAAW,KAAK,iBAAiB;AAC/B,cAAM,OAAO,KAAK,EAAE,KAAK,CAAC,UAAU;AAClC,cAAI,MAAM,OAAO;AACf,sCAAO,MAAM,MAAM,KAAK,EAAE,cAAc,CAAC;AAAA,UAC3C;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF,CAAC;AAAA,EACH,CAAC;AACD,8BAAS,iBAAiB,MAAM;AAC9B,UAAM,YAAY,IAAI,2BAAc;AAEpC,0BAAG,mCAAmC,MAAM;AAC1C,gCAAO,UAAU,SAAS,UAAU,EAAE,MAAM,CAAC,GAAG,MAAM,eAAe,CAAC,MAAM,CAAC,CAAC,EAAE,WAAW;AAAA,IAC7F,CAAC;AAED,0BAAG,0CAA0C,YAAY;AACvD,YAAM,UAAU,CAAC,GAAG,GAAG,CAAC;AACxB,UAAI,OAAO;AACX,YAAM,SAAS,CAAC;AAChB,YAAM,cAAc,MAAM,KAAK,KAAK,KAAK,SAAS,QAAQ,OAAO,CAAC,KAAK,QAAQ,MAAM,KAAK,CAAC,CAAC,CAAC,EAC1F,KAAK,OAAO,EACZ,KAAK,EACL,OAAO,QAAQ,EAAE;AAEpB,iBAAW,QAAQ,aAAa;AAC9B,YAAI,CAAC,KAAM;AACX,eAAO,KAAK,KAAK,MAAM,QAAW,IAAI,CAAC;AACvC,eAAO,KAAK,MAAM,IAAI;AAAA,MACxB;AACA,YAAM,SAAS,UAAU,OAAO;AAChC,iBAAW,SAAS,QAAQ;AAC1B,eAAO,SAAS,KAAK;AAAA,MACvB;AACA,aAAO,SAAS;AAChB,aAAO,MAAM;AAEb,iBAAW,KAAK,gBAAgB;AAC9B,cAAM,OAAO,KAAK,EAAE,KAAK,CAAC,UAAU;AAClC,cAAI,MAAM,OAAO;AACf,sCAAO,MAAM,MAAM,KAAK,EAAE,cAAc,CAAC;AAAA,UAC3C;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF,CAAC;AAED,gCAAS,wBAAwB,MAAM;AACrC,YAAM,iBAAiB,IAAI,2BAAc,KAAK;AAE9C,4BAAG,mCAAmC,MAAM;AAC1C;AAAA,UACE,eAAe,SAAS,gBAAgB,EAAE,MAAM,CAAC,GAAG,MAAM,qBAAqB,CAAC,MAAM,CAAC;AAAA,QACzF,EAAE,WAAW;AAAA,MACf,CAAC;AAED,4BAAG,0CAA0C,YAAY;AACvD,cAAM,UAAU,CAAC,GAAG,GAAG,CAAC;AACxB,YAAI,OAAO;AACX,cAAM,SAAS,CAAC;AAChB,cAAM,cAAc;AAAA,UAClB,KAAK,KAAK,KAAK,SAAS,QAAQ,OAAO,CAAC,KAAK,QAAQ,MAAM,KAAK,CAAC,CAAC;AAAA,QACpE,EACG,KAAK,OAAO,EACZ,KAAK,EACL,OAAO,QAAQ,EAAE;AAEpB,mBAAW,QAAQ,aAAa;AAC9B,cAAI,CAAC,KAAM;AACX,iBAAO,KAAK,KAAK,MAAM,QAAW,IAAI,CAAC;AACvC,iBAAO,KAAK,MAAM,IAAI;AAAA,QACxB;AACA,cAAM,SAAS,eAAe,OAAO;AACrC,mBAAW,SAAS,QAAQ;AAC1B,iBAAO,SAA
S,KAAK;AAAA,QACvB;AACA,eAAO,SAAS;AAChB,eAAO,MAAM;AAEb,mBAAW,KAAK,sBAAsB;AACpC,gBAAM,OAAO,KAAK,EAAE,KAAK,CAAC,UAAU;AAClC,gBAAI,MAAM,OAAO;AACf,wCAAO,MAAM,MAAM,KAAK,EAAE,cAAc,CAAC;AAAA,YAC3C;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF,CAAC;AAAA,IACH,CAAC;AAAA,EACH,CAAC;AACD,8BAAS,iBAAiB,MAAM;AAC9B,0BAAG,8BAA8B,MAAM;AACrC,sBAAgB,QAAQ,CAAC,GAAG,MAAM;AAChC,sCAAO,4BAAc,CAAC,CAAC,EAAE,cAAc,oBAAoB,CAAC,CAAC;AAAA,MAC/D,CAAC;AAAA,IACH,CAAC;AAAA,EACH,CAAC;AACD,8BAAS,mBAAmB,MAAM;AAChC,0BAAG,wCAAwC,MAAM;AAC/C,2BAAqB,QAAQ,CAAC,CAAC,GAAG,CAAC,MAAM;AACvC,sCAAO,kCAAgB,CAAC,CAAC,EAAE,cAAc,CAAC;AAAA,MAC5C,CAAC;AAAA,IACH,CAAC;AAAA,EACH,CAAC;AACH,CAAC;","names":[]}
1
+ {"version":3,"sources":["../../src/tokenize/tokenizer.test.ts"],"sourcesContent":["// SPDX-FileCopyrightText: 2024 LiveKit, Inc.\n//\n// SPDX-License-Identifier: Apache-2.0\nimport { describe, expect, it } from 'vitest';\nimport { SentenceTokenizer, WordTokenizer, hyphenateWord } from './basic/index.js';\nimport { splitParagraphs } from './basic/paragraph.js';\n\nconst TEXT =\n 'Hi! ' +\n 'LiveKit is a platform for live audio and video applications and services. ' +\n 'R.T.C stands for Real-Time Communication... again R.T.C. ' +\n 'Mr. Theo is testing the sentence tokenizer. ' +\n 'This is a test. Another test. ' +\n 'A short sentence. ' +\n 'A longer sentence that is longer than the previous sentence. ' +\n 'Find additional resources on livekit.com. ' +\n 'Find additional resources on docs.livekit.com. ' +\n 'f(x) = x * 2.54 + 42. ' +\n 'Hey! Hi! Hello! ';\n\nconst EXPECTED_MIN_20 = [\n 'Hi! LiveKit is a platform for live audio and video applications and services.',\n 'R.T.C stands for Real-Time Communication... again R.T.C.',\n 'Mr. Theo is testing the sentence tokenizer.',\n 'This is a test. Another test.',\n 'A short sentence. A longer sentence that is longer than the previous sentence.',\n 'Find additional resources on livekit.com.',\n 'Find additional resources on docs.livekit.com.',\n 'f(x) = x * 2.54 + 42.',\n 'Hey! Hi! Hello!',\n];\n\nconst WORDS_TEXT = 'This is a test. Blabla another test! 
multiple consecutive spaces: done';\nconst WORDS_EXPECTED = [\n 'This',\n 'is',\n 'a',\n 'test',\n 'Blabla',\n 'another',\n 'test',\n 'multiple',\n 'consecutive',\n 'spaces',\n 'done',\n];\n\nconst WORDS_PUNCT_TEXT =\n 'This is <phoneme alphabet=\"cmu-arpabet\" ph=\"AE K CH UW AH L IY\">actually</phoneme> tricky to handle.';\nconst WORDS_PUNCT_EXPECTED = [\n 'This',\n 'is',\n '<phoneme',\n 'alphabet=\"cmu-arpabet\"',\n 'ph=\"AE',\n 'K',\n 'CH',\n 'UW',\n 'AH',\n 'L',\n 'IY\">actually</phoneme>',\n 'tricky',\n 'to',\n 'handle.',\n];\n\nconst HYPHENATOR_TEXT = ['Segment', 'expected', 'communication', 'window', 'welcome', 'bedroom'];\nconst HYPHENATOR_EXPECTED = [\n ['Seg', 'ment'],\n ['ex', 'pect', 'ed'],\n ['com', 'mu', 'ni', 'ca', 'tion'],\n ['win', 'dow'],\n ['wel', 'come'],\n ['bed', 'room'],\n];\n\nconst PARAGRAPH_TEST_CASES: [string, [string, number, number][]][] = [\n ['Single paragraph.', [['Single paragraph.', 0, 17]]],\n [\n 'Paragraph 1.\\n\\nParagraph 2.',\n [\n ['Paragraph 1.', 0, 12],\n ['Paragraph 2.', 14, 26],\n ],\n ],\n [\n 'Para 1.\\n\\nPara 2.\\n\\nPara 3.',\n [\n ['Para 1.', 0, 7],\n ['Para 2.', 9, 16],\n ['Para 3.', 18, 25],\n ],\n ],\n ['\\n\\nParagraph with leading newlines.', [['Paragraph with leading newlines.', 2, 34]]],\n ['Paragraph with trailing newlines.\\n\\n', [['Paragraph with trailing newlines.', 0, 33]]],\n [\n '\\n\\n Paragraph with leading and trailing spaces. 
\\n\\n',\n [['Paragraph with leading and trailing spaces.', 4, 47]],\n ],\n [\n 'Para 1.\\n\\n\\n\\nPara 2.', // Multiple newlines between paragraphs\n [\n ['Para 1.', 0, 7],\n ['Para 2.', 11, 18],\n ],\n ],\n [\n 'Para 1.\\n \\n \\nPara 2.', // Newlines with spaces between paragraphs\n [\n ['Para 1.', 0, 7],\n ['Para 2.', 12, 19],\n ],\n ],\n [\n '', // Empty string\n [],\n ],\n [\n '\\n\\n\\n', // Only newlines\n [],\n ],\n [\n 'Line 1\\nLine 2\\nLine 3', // Single paragraph with newlines\n [['Line 1\\nLine 2\\nLine 3', 0, 20]],\n ],\n];\n\ndescribe('tokenizer', () => {\n describe('SentenceTokenizer', () => {\n const tokenizer = new SentenceTokenizer();\n\n it('should tokenize sentences correctly', () => {\n expect(tokenizer.tokenize(TEXT).every((x, i) => EXPECTED_MIN_20[i] === x)).toBeTruthy();\n });\n\n it('should stream tokenize sentences correctly', async () => {\n const pattern = [1, 2, 4];\n let text = TEXT;\n const chunks = [];\n const patternIter = Array(Math.ceil(text.length / pattern.reduce((sum, num) => sum + num, 0)))\n .fill(pattern)\n .flat()\n [Symbol.iterator]();\n\n for (const size of patternIter) {\n if (!text) break;\n chunks.push(text.slice(undefined, size));\n text = text.slice(size);\n }\n const stream = tokenizer.stream();\n for (const chunk of chunks) {\n stream.pushText(chunk);\n }\n stream.endInput();\n stream.close();\n\n for (const x of EXPECTED_MIN_20) {\n await stream.next().then((value) => {\n if (value.value) {\n expect(value.value.token).toStrictEqual(x);\n }\n });\n }\n });\n });\n describe('WordTokenizer', () => {\n const tokenizer = new WordTokenizer();\n\n it('should tokenize words correctly', () => {\n expect(tokenizer.tokenize(WORDS_TEXT).every((x, i) => WORDS_EXPECTED[i] === x)).toBeTruthy();\n });\n\n it('should stream tokenize words correctly', async () => {\n const pattern = [1, 2, 4];\n let text = WORDS_TEXT;\n const chunks = [];\n const patternIter = Array(Math.ceil(text.length / pattern.reduce((sum, num) => sum + num, 
0)))\n .fill(pattern)\n .flat()\n [Symbol.iterator]();\n\n for (const size of patternIter) {\n if (!text) break;\n chunks.push(text.slice(undefined, size));\n text = text.slice(size);\n }\n const stream = tokenizer.stream();\n for (const chunk of chunks) {\n stream.pushText(chunk);\n }\n stream.endInput();\n stream.close();\n\n for (const x of WORDS_EXPECTED) {\n await stream.next().then((value) => {\n if (value.value) {\n expect(value.value.token).toStrictEqual(x);\n }\n });\n }\n });\n\n describe('punctuation handling', () => {\n const tokenizerPunct = new WordTokenizer(false);\n\n it('should tokenize words correctly', () => {\n expect(\n tokenizerPunct.tokenize(WORDS_PUNCT_TEXT).every((x, i) => WORDS_PUNCT_EXPECTED[i] === x),\n ).toBeTruthy();\n });\n\n it('should stream tokenize words correctly', async () => {\n const pattern = [1, 2, 4];\n let text = WORDS_PUNCT_TEXT;\n const chunks = [];\n const patternIter = Array(\n Math.ceil(text.length / pattern.reduce((sum, num) => sum + num, 0)),\n )\n .fill(pattern)\n .flat()\n [Symbol.iterator]();\n\n for (const size of patternIter) {\n if (!text) break;\n chunks.push(text.slice(undefined, size));\n text = text.slice(size);\n }\n const stream = tokenizerPunct.stream();\n for (const chunk of chunks) {\n stream.pushText(chunk);\n }\n stream.endInput();\n stream.close();\n\n for (const x of WORDS_PUNCT_EXPECTED) {\n await stream.next().then((value) => {\n if (value.value) {\n expect(value.value.token).toStrictEqual(x);\n }\n });\n }\n });\n });\n });\n describe('hyphenateWord', () => {\n it('should hyphenate correctly', () => {\n HYPHENATOR_TEXT.forEach((x, i) => {\n expect(hyphenateWord(x)).toStrictEqual(HYPHENATOR_EXPECTED[i]);\n });\n });\n });\n describe('splitParagraphs', () => {\n it('should tokenize paragraphs correctly', () => {\n PARAGRAPH_TEST_CASES.forEach(([a, b]) => {\n expect(splitParagraphs(a)).toStrictEqual(b);\n });\n });\n 
});\n});\n"],"mappings":";AAGA,oBAAqC;AACrC,mBAAgE;AAChE,uBAAgC;AAEhC,MAAM,OACJ;AAYF,MAAM,kBAAkB;AAAA,EACtB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,MAAM,aAAa;AACnB,MAAM,iBAAiB;AAAA,EACrB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,MAAM,mBACJ;AACF,MAAM,uBAAuB;AAAA,EAC3B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,MAAM,kBAAkB,CAAC,WAAW,YAAY,iBAAiB,UAAU,WAAW,SAAS;AAC/F,MAAM,sBAAsB;AAAA,EAC1B,CAAC,OAAO,MAAM;AAAA,EACd,CAAC,MAAM,QAAQ,IAAI;AAAA,EACnB,CAAC,OAAO,MAAM,MAAM,MAAM,MAAM;AAAA,EAChC,CAAC,OAAO,KAAK;AAAA,EACb,CAAC,OAAO,MAAM;AAAA,EACd,CAAC,OAAO,MAAM;AAChB;AAEA,MAAM,uBAA+D;AAAA,EACnE,CAAC,qBAAqB,CAAC,CAAC,qBAAqB,GAAG,EAAE,CAAC,CAAC;AAAA,EACpD;AAAA,IACE;AAAA,IACA;AAAA,MACE,CAAC,gBAAgB,GAAG,EAAE;AAAA,MACtB,CAAC,gBAAgB,IAAI,EAAE;AAAA,IACzB;AAAA,EACF;AAAA,EACA;AAAA,IACE;AAAA,IACA;AAAA,MACE,CAAC,WAAW,GAAG,CAAC;AAAA,MAChB,CAAC,WAAW,GAAG,EAAE;AAAA,MACjB,CAAC,WAAW,IAAI,EAAE;AAAA,IACpB;AAAA,EACF;AAAA,EACA,CAAC,wCAAwC,CAAC,CAAC,oCAAoC,GAAG,EAAE,CAAC,CAAC;AAAA,EACtF,CAAC,yCAAyC,CAAC,CAAC,qCAAqC,GAAG,EAAE,CAAC,CAAC;AAAA,EACxF;AAAA,IACE;AAAA,IACA,CAAC,CAAC,+CAA+C,GAAG,EAAE,CAAC;AAAA,EACzD;AAAA,EACA;AAAA,IACE;AAAA;AAAA,IACA;AAAA,MACE,CAAC,WAAW,GAAG,CAAC;AAAA,MAChB,CAAC,WAAW,IAAI,EAAE;AAAA,IACpB;AAAA,EACF;AAAA,EACA;AAAA,IACE;AAAA;AAAA,IACA;AAAA,MACE,CAAC,WAAW,GAAG,CAAC;AAAA,MAChB,CAAC,WAAW,IAAI,EAAE;AAAA,IACpB;AAAA,EACF;AAAA,EACA;AAAA,IACE;AAAA;AAAA,IACA,CAAC;AAAA,EACH;AAAA,EACA;AAAA,IACE;AAAA;AAAA,IACA,CAAC;AAAA,EACH;AAAA,EACA;AAAA,IACE;AAAA;AAAA,IACA,CAAC,CAAC,0BAA0B,GAAG,EAAE,CAAC;AAAA,EACpC;AACF;AAAA,IAEA,wBAAS,aAAa,MAAM;AAC1B,8BAAS,qBAAqB,MAAM;AAClC,UAAM,YAAY,IAAI,+BAAkB;AAExC,0BAAG,uCAAuC,MAAM;AAC9C,gCAAO,UAAU,SAAS,IAAI,EAAE,MAAM,CAAC,GAAG,MAAM,gBAAgB,CAAC,MAAM,CAAC,CAAC,EAAE,WAAW;AAAA,IACxF,CAAC;AAED,0BAAG,8CAA8C,YAAY;AAC3D,YAAM,UAAU,CAAC,GAAG,GAAG,CAAC;AACxB,UAAI,OAAO
;AACX,YAAM,SAAS,CAAC;AAChB,YAAM,cAAc,MAAM,KAAK,KAAK,KAAK,SAAS,QAAQ,OAAO,CAAC,KAAK,QAAQ,MAAM,KAAK,CAAC,CAAC,CAAC,EAC1F,KAAK,OAAO,EACZ,KAAK,EACL,OAAO,QAAQ,EAAE;AAEpB,iBAAW,QAAQ,aAAa;AAC9B,YAAI,CAAC,KAAM;AACX,eAAO,KAAK,KAAK,MAAM,QAAW,IAAI,CAAC;AACvC,eAAO,KAAK,MAAM,IAAI;AAAA,MACxB;AACA,YAAM,SAAS,UAAU,OAAO;AAChC,iBAAW,SAAS,QAAQ;AAC1B,eAAO,SAAS,KAAK;AAAA,MACvB;AACA,aAAO,SAAS;AAChB,aAAO,MAAM;AAEb,iBAAW,KAAK,iBAAiB;AAC/B,cAAM,OAAO,KAAK,EAAE,KAAK,CAAC,UAAU;AAClC,cAAI,MAAM,OAAO;AACf,sCAAO,MAAM,MAAM,KAAK,EAAE,cAAc,CAAC;AAAA,UAC3C;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF,CAAC;AAAA,EACH,CAAC;AACD,8BAAS,iBAAiB,MAAM;AAC9B,UAAM,YAAY,IAAI,2BAAc;AAEpC,0BAAG,mCAAmC,MAAM;AAC1C,gCAAO,UAAU,SAAS,UAAU,EAAE,MAAM,CAAC,GAAG,MAAM,eAAe,CAAC,MAAM,CAAC,CAAC,EAAE,WAAW;AAAA,IAC7F,CAAC;AAED,0BAAG,0CAA0C,YAAY;AACvD,YAAM,UAAU,CAAC,GAAG,GAAG,CAAC;AACxB,UAAI,OAAO;AACX,YAAM,SAAS,CAAC;AAChB,YAAM,cAAc,MAAM,KAAK,KAAK,KAAK,SAAS,QAAQ,OAAO,CAAC,KAAK,QAAQ,MAAM,KAAK,CAAC,CAAC,CAAC,EAC1F,KAAK,OAAO,EACZ,KAAK,EACL,OAAO,QAAQ,EAAE;AAEpB,iBAAW,QAAQ,aAAa;AAC9B,YAAI,CAAC,KAAM;AACX,eAAO,KAAK,KAAK,MAAM,QAAW,IAAI,CAAC;AACvC,eAAO,KAAK,MAAM,IAAI;AAAA,MACxB;AACA,YAAM,SAAS,UAAU,OAAO;AAChC,iBAAW,SAAS,QAAQ;AAC1B,eAAO,SAAS,KAAK;AAAA,MACvB;AACA,aAAO,SAAS;AAChB,aAAO,MAAM;AAEb,iBAAW,KAAK,gBAAgB;AAC9B,cAAM,OAAO,KAAK,EAAE,KAAK,CAAC,UAAU;AAClC,cAAI,MAAM,OAAO;AACf,sCAAO,MAAM,MAAM,KAAK,EAAE,cAAc,CAAC;AAAA,UAC3C;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF,CAAC;AAED,gCAAS,wBAAwB,MAAM;AACrC,YAAM,iBAAiB,IAAI,2BAAc,KAAK;AAE9C,4BAAG,mCAAmC,MAAM;AAC1C;AAAA,UACE,eAAe,SAAS,gBAAgB,EAAE,MAAM,CAAC,GAAG,MAAM,qBAAqB,CAAC,MAAM,CAAC;AAAA,QACzF,EAAE,WAAW;AAAA,MACf,CAAC;AAED,4BAAG,0CAA0C,YAAY;AACvD,cAAM,UAAU,CAAC,GAAG,GAAG,CAAC;AACxB,YAAI,OAAO;AACX,cAAM,SAAS,CAAC;AAChB,cAAM,cAAc;AAAA,UAClB,KAAK,KAAK,KAAK,SAAS,QAAQ,OAAO,CAAC,KAAK,QAAQ,MAAM,KAAK,CAAC,CAAC;AAAA,QACpE,EACG,KAAK,OAAO,EACZ,KAAK,EACL,OAAO,QAAQ,EAAE;AAEpB,mBAAW,QAAQ,aAAa;AAC9B,cAAI,CAAC,KAAM;AACX,iBAAO,KAAK,KAAK,MAAM,QAAW,IAAI,CAAC;AACvC,iBAAO,KAAK,MAAM,IAAI;AAAA,QACxB;AACA,cAAM,SAAS,eAAe,OAAO;AACrC,mBAAW,SAAS,
QAAQ;AAC1B,iBAAO,SAAS,KAAK;AAAA,QACvB;AACA,eAAO,SAAS;AAChB,eAAO,MAAM;AAEb,mBAAW,KAAK,sBAAsB;AACpC,gBAAM,OAAO,KAAK,EAAE,KAAK,CAAC,UAAU;AAClC,gBAAI,MAAM,OAAO;AACf,wCAAO,MAAM,MAAM,KAAK,EAAE,cAAc,CAAC;AAAA,YAC3C;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF,CAAC;AAAA,IACH,CAAC;AAAA,EACH,CAAC;AACD,8BAAS,iBAAiB,MAAM;AAC9B,0BAAG,8BAA8B,MAAM;AACrC,sBAAgB,QAAQ,CAAC,GAAG,MAAM;AAChC,sCAAO,4BAAc,CAAC,CAAC,EAAE,cAAc,oBAAoB,CAAC,CAAC;AAAA,MAC/D,CAAC;AAAA,IACH,CAAC;AAAA,EACH,CAAC;AACD,8BAAS,mBAAmB,MAAM;AAChC,0BAAG,wCAAwC,MAAM;AAC/C,2BAAqB,QAAQ,CAAC,CAAC,GAAG,CAAC,MAAM;AACvC,sCAAO,kCAAgB,CAAC,CAAC,EAAE,cAAc,CAAC;AAAA,MAC5C,CAAC;AAAA,IACH,CAAC;AAAA,EACH,CAAC;AACH,CAAC;","names":[]}
@@ -1,13 +1,15 @@
1
1
  import { describe, expect, it } from "vitest";
2
2
  import { SentenceTokenizer, WordTokenizer, hyphenateWord } from "./basic/index.js";
3
3
  import { splitParagraphs } from "./basic/paragraph.js";
4
- const TEXT = "Hi! LiveKit is a platform for live audio and video applications and services. R.T.C stands for Real-Time Communication... again R.T.C. Mr. Theo is testing the sentence tokenizer. This is a test. Another test. A short sentence. A longer sentence that is longer than the previous sentence. f(x) = x * 2.54 + 42. Hey! Hi! Hello! ";
4
+ const TEXT = "Hi! LiveKit is a platform for live audio and video applications and services. R.T.C stands for Real-Time Communication... again R.T.C. Mr. Theo is testing the sentence tokenizer. This is a test. Another test. A short sentence. A longer sentence that is longer than the previous sentence. Find additional resources on livekit.com. Find additional resources on docs.livekit.com. f(x) = x * 2.54 + 42. Hey! Hi! Hello! ";
5
5
  const EXPECTED_MIN_20 = [
6
6
  "Hi! LiveKit is a platform for live audio and video applications and services.",
7
7
  "R.T.C stands for Real-Time Communication... again R.T.C.",
8
8
  "Mr. Theo is testing the sentence tokenizer.",
9
9
  "This is a test. Another test.",
10
10
  "A short sentence. A longer sentence that is longer than the previous sentence.",
11
+ "Find additional resources on livekit.com.",
12
+ "Find additional resources on docs.livekit.com.",
11
13
  "f(x) = x * 2.54 + 42.",
12
14
  "Hey! Hi! Hello!"
13
15
  ];
@@ -1 +1 @@
1
- {"version":3,"sources":["../../src/tokenize/tokenizer.test.ts"],"sourcesContent":["// SPDX-FileCopyrightText: 2024 LiveKit, Inc.\n//\n// SPDX-License-Identifier: Apache-2.0\nimport { describe, expect, it } from 'vitest';\nimport { SentenceTokenizer, WordTokenizer, hyphenateWord } from './basic/index.js';\nimport { splitParagraphs } from './basic/paragraph.js';\n\nconst TEXT =\n 'Hi! ' +\n 'LiveKit is a platform for live audio and video applications and services. ' +\n 'R.T.C stands for Real-Time Communication... again R.T.C. ' +\n 'Mr. Theo is testing the sentence tokenizer. ' +\n 'This is a test. Another test. ' +\n 'A short sentence. ' +\n 'A longer sentence that is longer than the previous sentence. ' +\n 'f(x) = x * 2.54 + 42. ' +\n 'Hey! Hi! Hello! ';\n\nconst EXPECTED_MIN_20 = [\n 'Hi! LiveKit is a platform for live audio and video applications and services.',\n 'R.T.C stands for Real-Time Communication... again R.T.C.',\n 'Mr. Theo is testing the sentence tokenizer.',\n 'This is a test. Another test.',\n 'A short sentence. A longer sentence that is longer than the previous sentence.',\n 'f(x) = x * 2.54 + 42.',\n 'Hey! Hi! Hello!',\n];\n\nconst WORDS_TEXT = 'This is a test. Blabla another test! 
multiple consecutive spaces: done';\nconst WORDS_EXPECTED = [\n 'This',\n 'is',\n 'a',\n 'test',\n 'Blabla',\n 'another',\n 'test',\n 'multiple',\n 'consecutive',\n 'spaces',\n 'done',\n];\n\nconst WORDS_PUNCT_TEXT =\n 'This is <phoneme alphabet=\"cmu-arpabet\" ph=\"AE K CH UW AH L IY\">actually</phoneme> tricky to handle.';\nconst WORDS_PUNCT_EXPECTED = [\n 'This',\n 'is',\n '<phoneme',\n 'alphabet=\"cmu-arpabet\"',\n 'ph=\"AE',\n 'K',\n 'CH',\n 'UW',\n 'AH',\n 'L',\n 'IY\">actually</phoneme>',\n 'tricky',\n 'to',\n 'handle.',\n];\n\nconst HYPHENATOR_TEXT = ['Segment', 'expected', 'communication', 'window', 'welcome', 'bedroom'];\nconst HYPHENATOR_EXPECTED = [\n ['Seg', 'ment'],\n ['ex', 'pect', 'ed'],\n ['com', 'mu', 'ni', 'ca', 'tion'],\n ['win', 'dow'],\n ['wel', 'come'],\n ['bed', 'room'],\n];\n\nconst PARAGRAPH_TEST_CASES: [string, [string, number, number][]][] = [\n ['Single paragraph.', [['Single paragraph.', 0, 17]]],\n [\n 'Paragraph 1.\\n\\nParagraph 2.',\n [\n ['Paragraph 1.', 0, 12],\n ['Paragraph 2.', 14, 26],\n ],\n ],\n [\n 'Para 1.\\n\\nPara 2.\\n\\nPara 3.',\n [\n ['Para 1.', 0, 7],\n ['Para 2.', 9, 16],\n ['Para 3.', 18, 25],\n ],\n ],\n ['\\n\\nParagraph with leading newlines.', [['Paragraph with leading newlines.', 2, 34]]],\n ['Paragraph with trailing newlines.\\n\\n', [['Paragraph with trailing newlines.', 0, 33]]],\n [\n '\\n\\n Paragraph with leading and trailing spaces. 
\\n\\n',\n [['Paragraph with leading and trailing spaces.', 4, 47]],\n ],\n [\n 'Para 1.\\n\\n\\n\\nPara 2.', // Multiple newlines between paragraphs\n [\n ['Para 1.', 0, 7],\n ['Para 2.', 11, 18],\n ],\n ],\n [\n 'Para 1.\\n \\n \\nPara 2.', // Newlines with spaces between paragraphs\n [\n ['Para 1.', 0, 7],\n ['Para 2.', 12, 19],\n ],\n ],\n [\n '', // Empty string\n [],\n ],\n [\n '\\n\\n\\n', // Only newlines\n [],\n ],\n [\n 'Line 1\\nLine 2\\nLine 3', // Single paragraph with newlines\n [['Line 1\\nLine 2\\nLine 3', 0, 20]],\n ],\n];\n\ndescribe('tokenizer', () => {\n describe('SentenceTokenizer', () => {\n const tokenizer = new SentenceTokenizer();\n\n it('should tokenize sentences correctly', () => {\n expect(tokenizer.tokenize(TEXT).every((x, i) => EXPECTED_MIN_20[i] === x)).toBeTruthy();\n });\n\n it('should stream tokenize sentences correctly', async () => {\n const pattern = [1, 2, 4];\n let text = TEXT;\n const chunks = [];\n const patternIter = Array(Math.ceil(text.length / pattern.reduce((sum, num) => sum + num, 0)))\n .fill(pattern)\n .flat()\n [Symbol.iterator]();\n\n for (const size of patternIter) {\n if (!text) break;\n chunks.push(text.slice(undefined, size));\n text = text.slice(size);\n }\n const stream = tokenizer.stream();\n for (const chunk of chunks) {\n stream.pushText(chunk);\n }\n stream.endInput();\n stream.close();\n\n for (const x of EXPECTED_MIN_20) {\n await stream.next().then((value) => {\n if (value.value) {\n expect(value.value.token).toStrictEqual(x);\n }\n });\n }\n });\n });\n describe('WordTokenizer', () => {\n const tokenizer = new WordTokenizer();\n\n it('should tokenize words correctly', () => {\n expect(tokenizer.tokenize(WORDS_TEXT).every((x, i) => WORDS_EXPECTED[i] === x)).toBeTruthy();\n });\n\n it('should stream tokenize words correctly', async () => {\n const pattern = [1, 2, 4];\n let text = WORDS_TEXT;\n const chunks = [];\n const patternIter = Array(Math.ceil(text.length / pattern.reduce((sum, num) => sum + num, 
0)))\n .fill(pattern)\n .flat()\n [Symbol.iterator]();\n\n for (const size of patternIter) {\n if (!text) break;\n chunks.push(text.slice(undefined, size));\n text = text.slice(size);\n }\n const stream = tokenizer.stream();\n for (const chunk of chunks) {\n stream.pushText(chunk);\n }\n stream.endInput();\n stream.close();\n\n for (const x of WORDS_EXPECTED) {\n await stream.next().then((value) => {\n if (value.value) {\n expect(value.value.token).toStrictEqual(x);\n }\n });\n }\n });\n\n describe('punctuation handling', () => {\n const tokenizerPunct = new WordTokenizer(false);\n\n it('should tokenize words correctly', () => {\n expect(\n tokenizerPunct.tokenize(WORDS_PUNCT_TEXT).every((x, i) => WORDS_PUNCT_EXPECTED[i] === x),\n ).toBeTruthy();\n });\n\n it('should stream tokenize words correctly', async () => {\n const pattern = [1, 2, 4];\n let text = WORDS_PUNCT_TEXT;\n const chunks = [];\n const patternIter = Array(\n Math.ceil(text.length / pattern.reduce((sum, num) => sum + num, 0)),\n )\n .fill(pattern)\n .flat()\n [Symbol.iterator]();\n\n for (const size of patternIter) {\n if (!text) break;\n chunks.push(text.slice(undefined, size));\n text = text.slice(size);\n }\n const stream = tokenizerPunct.stream();\n for (const chunk of chunks) {\n stream.pushText(chunk);\n }\n stream.endInput();\n stream.close();\n\n for (const x of WORDS_PUNCT_EXPECTED) {\n await stream.next().then((value) => {\n if (value.value) {\n expect(value.value.token).toStrictEqual(x);\n }\n });\n }\n });\n });\n });\n describe('hyphenateWord', () => {\n it('should hyphenate correctly', () => {\n HYPHENATOR_TEXT.forEach((x, i) => {\n expect(hyphenateWord(x)).toStrictEqual(HYPHENATOR_EXPECTED[i]);\n });\n });\n });\n describe('splitParagraphs', () => {\n it('should tokenize paragraphs correctly', () => {\n PARAGRAPH_TEST_CASES.forEach(([a, b]) => {\n expect(splitParagraphs(a)).toStrictEqual(b);\n });\n });\n 
});\n});\n"],"mappings":"AAGA,SAAS,UAAU,QAAQ,UAAU;AACrC,SAAS,mBAAmB,eAAe,qBAAqB;AAChE,SAAS,uBAAuB;AAEhC,MAAM,OACJ;AAUF,MAAM,kBAAkB;AAAA,EACtB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,MAAM,aAAa;AACnB,MAAM,iBAAiB;AAAA,EACrB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,MAAM,mBACJ;AACF,MAAM,uBAAuB;AAAA,EAC3B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,MAAM,kBAAkB,CAAC,WAAW,YAAY,iBAAiB,UAAU,WAAW,SAAS;AAC/F,MAAM,sBAAsB;AAAA,EAC1B,CAAC,OAAO,MAAM;AAAA,EACd,CAAC,MAAM,QAAQ,IAAI;AAAA,EACnB,CAAC,OAAO,MAAM,MAAM,MAAM,MAAM;AAAA,EAChC,CAAC,OAAO,KAAK;AAAA,EACb,CAAC,OAAO,MAAM;AAAA,EACd,CAAC,OAAO,MAAM;AAChB;AAEA,MAAM,uBAA+D;AAAA,EACnE,CAAC,qBAAqB,CAAC,CAAC,qBAAqB,GAAG,EAAE,CAAC,CAAC;AAAA,EACpD;AAAA,IACE;AAAA,IACA;AAAA,MACE,CAAC,gBAAgB,GAAG,EAAE;AAAA,MACtB,CAAC,gBAAgB,IAAI,EAAE;AAAA,IACzB;AAAA,EACF;AAAA,EACA;AAAA,IACE;AAAA,IACA;AAAA,MACE,CAAC,WAAW,GAAG,CAAC;AAAA,MAChB,CAAC,WAAW,GAAG,EAAE;AAAA,MACjB,CAAC,WAAW,IAAI,EAAE;AAAA,IACpB;AAAA,EACF;AAAA,EACA,CAAC,wCAAwC,CAAC,CAAC,oCAAoC,GAAG,EAAE,CAAC,CAAC;AAAA,EACtF,CAAC,yCAAyC,CAAC,CAAC,qCAAqC,GAAG,EAAE,CAAC,CAAC;AAAA,EACxF;AAAA,IACE;AAAA,IACA,CAAC,CAAC,+CAA+C,GAAG,EAAE,CAAC;AAAA,EACzD;AAAA,EACA;AAAA,IACE;AAAA;AAAA,IACA;AAAA,MACE,CAAC,WAAW,GAAG,CAAC;AAAA,MAChB,CAAC,WAAW,IAAI,EAAE;AAAA,IACpB;AAAA,EACF;AAAA,EACA;AAAA,IACE;AAAA;AAAA,IACA;AAAA,MACE,CAAC,WAAW,GAAG,CAAC;AAAA,MAChB,CAAC,WAAW,IAAI,EAAE;AAAA,IACpB;AAAA,EACF;AAAA,EACA;AAAA,IACE;AAAA;AAAA,IACA,CAAC;AAAA,EACH;AAAA,EACA;AAAA,IACE;AAAA;AAAA,IACA,CAAC;AAAA,EACH;AAAA,EACA;AAAA,IACE;AAAA;AAAA,IACA,CAAC,CAAC,0BAA0B,GAAG,EAAE,CAAC;AAAA,EACpC;AACF;AAEA,SAAS,aAAa,MAAM;AAC1B,WAAS,qBAAqB,MAAM;AAClC,UAAM,YAAY,IAAI,kBAAkB;AAExC,OAAG,uCAAuC,MAAM;AAC9C,aAAO,UAAU,SAAS,IAAI,EAAE,MAAM,CAAC,GAAG,MAAM,gBAAgB,CAAC,MAAM,CAAC,CAAC,EAAE,WAAW;AAAA,IACxF,CAAC;AAED,OAAG,8CAA8C,YAAY;AAC3D,YAAM,UAAU,CAAC,GAAG,GAAG,CAAC;AACxB,UAAI,
OAAO;AACX,YAAM,SAAS,CAAC;AAChB,YAAM,cAAc,MAAM,KAAK,KAAK,KAAK,SAAS,QAAQ,OAAO,CAAC,KAAK,QAAQ,MAAM,KAAK,CAAC,CAAC,CAAC,EAC1F,KAAK,OAAO,EACZ,KAAK,EACL,OAAO,QAAQ,EAAE;AAEpB,iBAAW,QAAQ,aAAa;AAC9B,YAAI,CAAC,KAAM;AACX,eAAO,KAAK,KAAK,MAAM,QAAW,IAAI,CAAC;AACvC,eAAO,KAAK,MAAM,IAAI;AAAA,MACxB;AACA,YAAM,SAAS,UAAU,OAAO;AAChC,iBAAW,SAAS,QAAQ;AAC1B,eAAO,SAAS,KAAK;AAAA,MACvB;AACA,aAAO,SAAS;AAChB,aAAO,MAAM;AAEb,iBAAW,KAAK,iBAAiB;AAC/B,cAAM,OAAO,KAAK,EAAE,KAAK,CAAC,UAAU;AAClC,cAAI,MAAM,OAAO;AACf,mBAAO,MAAM,MAAM,KAAK,EAAE,cAAc,CAAC;AAAA,UAC3C;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF,CAAC;AAAA,EACH,CAAC;AACD,WAAS,iBAAiB,MAAM;AAC9B,UAAM,YAAY,IAAI,cAAc;AAEpC,OAAG,mCAAmC,MAAM;AAC1C,aAAO,UAAU,SAAS,UAAU,EAAE,MAAM,CAAC,GAAG,MAAM,eAAe,CAAC,MAAM,CAAC,CAAC,EAAE,WAAW;AAAA,IAC7F,CAAC;AAED,OAAG,0CAA0C,YAAY;AACvD,YAAM,UAAU,CAAC,GAAG,GAAG,CAAC;AACxB,UAAI,OAAO;AACX,YAAM,SAAS,CAAC;AAChB,YAAM,cAAc,MAAM,KAAK,KAAK,KAAK,SAAS,QAAQ,OAAO,CAAC,KAAK,QAAQ,MAAM,KAAK,CAAC,CAAC,CAAC,EAC1F,KAAK,OAAO,EACZ,KAAK,EACL,OAAO,QAAQ,EAAE;AAEpB,iBAAW,QAAQ,aAAa;AAC9B,YAAI,CAAC,KAAM;AACX,eAAO,KAAK,KAAK,MAAM,QAAW,IAAI,CAAC;AACvC,eAAO,KAAK,MAAM,IAAI;AAAA,MACxB;AACA,YAAM,SAAS,UAAU,OAAO;AAChC,iBAAW,SAAS,QAAQ;AAC1B,eAAO,SAAS,KAAK;AAAA,MACvB;AACA,aAAO,SAAS;AAChB,aAAO,MAAM;AAEb,iBAAW,KAAK,gBAAgB;AAC9B,cAAM,OAAO,KAAK,EAAE,KAAK,CAAC,UAAU;AAClC,cAAI,MAAM,OAAO;AACf,mBAAO,MAAM,MAAM,KAAK,EAAE,cAAc,CAAC;AAAA,UAC3C;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF,CAAC;AAED,aAAS,wBAAwB,MAAM;AACrC,YAAM,iBAAiB,IAAI,cAAc,KAAK;AAE9C,SAAG,mCAAmC,MAAM;AAC1C;AAAA,UACE,eAAe,SAAS,gBAAgB,EAAE,MAAM,CAAC,GAAG,MAAM,qBAAqB,CAAC,MAAM,CAAC;AAAA,QACzF,EAAE,WAAW;AAAA,MACf,CAAC;AAED,SAAG,0CAA0C,YAAY;AACvD,cAAM,UAAU,CAAC,GAAG,GAAG,CAAC;AACxB,YAAI,OAAO;AACX,cAAM,SAAS,CAAC;AAChB,cAAM,cAAc;AAAA,UAClB,KAAK,KAAK,KAAK,SAAS,QAAQ,OAAO,CAAC,KAAK,QAAQ,MAAM,KAAK,CAAC,CAAC;AAAA,QACpE,EACG,KAAK,OAAO,EACZ,KAAK,EACL,OAAO,QAAQ,EAAE;AAEpB,mBAAW,QAAQ,aAAa;AAC9B,cAAI,CAAC,KAAM;AACX,iBAAO,KAAK,KAAK,MAAM,QAAW,IAAI,CAAC;AACvC,iBAAO,KAAK,MAAM,IAAI;AAAA,QACxB;AACA,cAAM,SAAS,eAAe,OAAO;AACrC,mBAAW,SAAS,QAAQ;
AAC1B,iBAAO,SAAS,KAAK;AAAA,QACvB;AACA,eAAO,SAAS;AAChB,eAAO,MAAM;AAEb,mBAAW,KAAK,sBAAsB;AACpC,gBAAM,OAAO,KAAK,EAAE,KAAK,CAAC,UAAU;AAClC,gBAAI,MAAM,OAAO;AACf,qBAAO,MAAM,MAAM,KAAK,EAAE,cAAc,CAAC;AAAA,YAC3C;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF,CAAC;AAAA,IACH,CAAC;AAAA,EACH,CAAC;AACD,WAAS,iBAAiB,MAAM;AAC9B,OAAG,8BAA8B,MAAM;AACrC,sBAAgB,QAAQ,CAAC,GAAG,MAAM;AAChC,eAAO,cAAc,CAAC,CAAC,EAAE,cAAc,oBAAoB,CAAC,CAAC;AAAA,MAC/D,CAAC;AAAA,IACH,CAAC;AAAA,EACH,CAAC;AACD,WAAS,mBAAmB,MAAM;AAChC,OAAG,wCAAwC,MAAM;AAC/C,2BAAqB,QAAQ,CAAC,CAAC,GAAG,CAAC,MAAM;AACvC,eAAO,gBAAgB,CAAC,CAAC,EAAE,cAAc,CAAC;AAAA,MAC5C,CAAC;AAAA,IACH,CAAC;AAAA,EACH,CAAC;AACH,CAAC;","names":[]}
1
+ {"version":3,"sources":["../../src/tokenize/tokenizer.test.ts"],"sourcesContent":["// SPDX-FileCopyrightText: 2024 LiveKit, Inc.\n//\n// SPDX-License-Identifier: Apache-2.0\nimport { describe, expect, it } from 'vitest';\nimport { SentenceTokenizer, WordTokenizer, hyphenateWord } from './basic/index.js';\nimport { splitParagraphs } from './basic/paragraph.js';\n\nconst TEXT =\n 'Hi! ' +\n 'LiveKit is a platform for live audio and video applications and services. ' +\n 'R.T.C stands for Real-Time Communication... again R.T.C. ' +\n 'Mr. Theo is testing the sentence tokenizer. ' +\n 'This is a test. Another test. ' +\n 'A short sentence. ' +\n 'A longer sentence that is longer than the previous sentence. ' +\n 'Find additional resources on livekit.com. ' +\n 'Find additional resources on docs.livekit.com. ' +\n 'f(x) = x * 2.54 + 42. ' +\n 'Hey! Hi! Hello! ';\n\nconst EXPECTED_MIN_20 = [\n 'Hi! LiveKit is a platform for live audio and video applications and services.',\n 'R.T.C stands for Real-Time Communication... again R.T.C.',\n 'Mr. Theo is testing the sentence tokenizer.',\n 'This is a test. Another test.',\n 'A short sentence. A longer sentence that is longer than the previous sentence.',\n 'Find additional resources on livekit.com.',\n 'Find additional resources on docs.livekit.com.',\n 'f(x) = x * 2.54 + 42.',\n 'Hey! Hi! Hello!',\n];\n\nconst WORDS_TEXT = 'This is a test. Blabla another test! 
multiple consecutive spaces: done';\nconst WORDS_EXPECTED = [\n 'This',\n 'is',\n 'a',\n 'test',\n 'Blabla',\n 'another',\n 'test',\n 'multiple',\n 'consecutive',\n 'spaces',\n 'done',\n];\n\nconst WORDS_PUNCT_TEXT =\n 'This is <phoneme alphabet=\"cmu-arpabet\" ph=\"AE K CH UW AH L IY\">actually</phoneme> tricky to handle.';\nconst WORDS_PUNCT_EXPECTED = [\n 'This',\n 'is',\n '<phoneme',\n 'alphabet=\"cmu-arpabet\"',\n 'ph=\"AE',\n 'K',\n 'CH',\n 'UW',\n 'AH',\n 'L',\n 'IY\">actually</phoneme>',\n 'tricky',\n 'to',\n 'handle.',\n];\n\nconst HYPHENATOR_TEXT = ['Segment', 'expected', 'communication', 'window', 'welcome', 'bedroom'];\nconst HYPHENATOR_EXPECTED = [\n ['Seg', 'ment'],\n ['ex', 'pect', 'ed'],\n ['com', 'mu', 'ni', 'ca', 'tion'],\n ['win', 'dow'],\n ['wel', 'come'],\n ['bed', 'room'],\n];\n\nconst PARAGRAPH_TEST_CASES: [string, [string, number, number][]][] = [\n ['Single paragraph.', [['Single paragraph.', 0, 17]]],\n [\n 'Paragraph 1.\\n\\nParagraph 2.',\n [\n ['Paragraph 1.', 0, 12],\n ['Paragraph 2.', 14, 26],\n ],\n ],\n [\n 'Para 1.\\n\\nPara 2.\\n\\nPara 3.',\n [\n ['Para 1.', 0, 7],\n ['Para 2.', 9, 16],\n ['Para 3.', 18, 25],\n ],\n ],\n ['\\n\\nParagraph with leading newlines.', [['Paragraph with leading newlines.', 2, 34]]],\n ['Paragraph with trailing newlines.\\n\\n', [['Paragraph with trailing newlines.', 0, 33]]],\n [\n '\\n\\n Paragraph with leading and trailing spaces. 
\\n\\n',\n [['Paragraph with leading and trailing spaces.', 4, 47]],\n ],\n [\n 'Para 1.\\n\\n\\n\\nPara 2.', // Multiple newlines between paragraphs\n [\n ['Para 1.', 0, 7],\n ['Para 2.', 11, 18],\n ],\n ],\n [\n 'Para 1.\\n \\n \\nPara 2.', // Newlines with spaces between paragraphs\n [\n ['Para 1.', 0, 7],\n ['Para 2.', 12, 19],\n ],\n ],\n [\n '', // Empty string\n [],\n ],\n [\n '\\n\\n\\n', // Only newlines\n [],\n ],\n [\n 'Line 1\\nLine 2\\nLine 3', // Single paragraph with newlines\n [['Line 1\\nLine 2\\nLine 3', 0, 20]],\n ],\n];\n\ndescribe('tokenizer', () => {\n describe('SentenceTokenizer', () => {\n const tokenizer = new SentenceTokenizer();\n\n it('should tokenize sentences correctly', () => {\n expect(tokenizer.tokenize(TEXT).every((x, i) => EXPECTED_MIN_20[i] === x)).toBeTruthy();\n });\n\n it('should stream tokenize sentences correctly', async () => {\n const pattern = [1, 2, 4];\n let text = TEXT;\n const chunks = [];\n const patternIter = Array(Math.ceil(text.length / pattern.reduce((sum, num) => sum + num, 0)))\n .fill(pattern)\n .flat()\n [Symbol.iterator]();\n\n for (const size of patternIter) {\n if (!text) break;\n chunks.push(text.slice(undefined, size));\n text = text.slice(size);\n }\n const stream = tokenizer.stream();\n for (const chunk of chunks) {\n stream.pushText(chunk);\n }\n stream.endInput();\n stream.close();\n\n for (const x of EXPECTED_MIN_20) {\n await stream.next().then((value) => {\n if (value.value) {\n expect(value.value.token).toStrictEqual(x);\n }\n });\n }\n });\n });\n describe('WordTokenizer', () => {\n const tokenizer = new WordTokenizer();\n\n it('should tokenize words correctly', () => {\n expect(tokenizer.tokenize(WORDS_TEXT).every((x, i) => WORDS_EXPECTED[i] === x)).toBeTruthy();\n });\n\n it('should stream tokenize words correctly', async () => {\n const pattern = [1, 2, 4];\n let text = WORDS_TEXT;\n const chunks = [];\n const patternIter = Array(Math.ceil(text.length / pattern.reduce((sum, num) => sum + num, 
0)))\n .fill(pattern)\n .flat()\n [Symbol.iterator]();\n\n for (const size of patternIter) {\n if (!text) break;\n chunks.push(text.slice(undefined, size));\n text = text.slice(size);\n }\n const stream = tokenizer.stream();\n for (const chunk of chunks) {\n stream.pushText(chunk);\n }\n stream.endInput();\n stream.close();\n\n for (const x of WORDS_EXPECTED) {\n await stream.next().then((value) => {\n if (value.value) {\n expect(value.value.token).toStrictEqual(x);\n }\n });\n }\n });\n\n describe('punctuation handling', () => {\n const tokenizerPunct = new WordTokenizer(false);\n\n it('should tokenize words correctly', () => {\n expect(\n tokenizerPunct.tokenize(WORDS_PUNCT_TEXT).every((x, i) => WORDS_PUNCT_EXPECTED[i] === x),\n ).toBeTruthy();\n });\n\n it('should stream tokenize words correctly', async () => {\n const pattern = [1, 2, 4];\n let text = WORDS_PUNCT_TEXT;\n const chunks = [];\n const patternIter = Array(\n Math.ceil(text.length / pattern.reduce((sum, num) => sum + num, 0)),\n )\n .fill(pattern)\n .flat()\n [Symbol.iterator]();\n\n for (const size of patternIter) {\n if (!text) break;\n chunks.push(text.slice(undefined, size));\n text = text.slice(size);\n }\n const stream = tokenizerPunct.stream();\n for (const chunk of chunks) {\n stream.pushText(chunk);\n }\n stream.endInput();\n stream.close();\n\n for (const x of WORDS_PUNCT_EXPECTED) {\n await stream.next().then((value) => {\n if (value.value) {\n expect(value.value.token).toStrictEqual(x);\n }\n });\n }\n });\n });\n });\n describe('hyphenateWord', () => {\n it('should hyphenate correctly', () => {\n HYPHENATOR_TEXT.forEach((x, i) => {\n expect(hyphenateWord(x)).toStrictEqual(HYPHENATOR_EXPECTED[i]);\n });\n });\n });\n describe('splitParagraphs', () => {\n it('should tokenize paragraphs correctly', () => {\n PARAGRAPH_TEST_CASES.forEach(([a, b]) => {\n expect(splitParagraphs(a)).toStrictEqual(b);\n });\n });\n 
});\n});\n"],"mappings":"AAGA,SAAS,UAAU,QAAQ,UAAU;AACrC,SAAS,mBAAmB,eAAe,qBAAqB;AAChE,SAAS,uBAAuB;AAEhC,MAAM,OACJ;AAYF,MAAM,kBAAkB;AAAA,EACtB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,MAAM,aAAa;AACnB,MAAM,iBAAiB;AAAA,EACrB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,MAAM,mBACJ;AACF,MAAM,uBAAuB;AAAA,EAC3B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,MAAM,kBAAkB,CAAC,WAAW,YAAY,iBAAiB,UAAU,WAAW,SAAS;AAC/F,MAAM,sBAAsB;AAAA,EAC1B,CAAC,OAAO,MAAM;AAAA,EACd,CAAC,MAAM,QAAQ,IAAI;AAAA,EACnB,CAAC,OAAO,MAAM,MAAM,MAAM,MAAM;AAAA,EAChC,CAAC,OAAO,KAAK;AAAA,EACb,CAAC,OAAO,MAAM;AAAA,EACd,CAAC,OAAO,MAAM;AAChB;AAEA,MAAM,uBAA+D;AAAA,EACnE,CAAC,qBAAqB,CAAC,CAAC,qBAAqB,GAAG,EAAE,CAAC,CAAC;AAAA,EACpD;AAAA,IACE;AAAA,IACA;AAAA,MACE,CAAC,gBAAgB,GAAG,EAAE;AAAA,MACtB,CAAC,gBAAgB,IAAI,EAAE;AAAA,IACzB;AAAA,EACF;AAAA,EACA;AAAA,IACE;AAAA,IACA;AAAA,MACE,CAAC,WAAW,GAAG,CAAC;AAAA,MAChB,CAAC,WAAW,GAAG,EAAE;AAAA,MACjB,CAAC,WAAW,IAAI,EAAE;AAAA,IACpB;AAAA,EACF;AAAA,EACA,CAAC,wCAAwC,CAAC,CAAC,oCAAoC,GAAG,EAAE,CAAC,CAAC;AAAA,EACtF,CAAC,yCAAyC,CAAC,CAAC,qCAAqC,GAAG,EAAE,CAAC,CAAC;AAAA,EACxF;AAAA,IACE;AAAA,IACA,CAAC,CAAC,+CAA+C,GAAG,EAAE,CAAC;AAAA,EACzD;AAAA,EACA;AAAA,IACE;AAAA;AAAA,IACA;AAAA,MACE,CAAC,WAAW,GAAG,CAAC;AAAA,MAChB,CAAC,WAAW,IAAI,EAAE;AAAA,IACpB;AAAA,EACF;AAAA,EACA;AAAA,IACE;AAAA;AAAA,IACA;AAAA,MACE,CAAC,WAAW,GAAG,CAAC;AAAA,MAChB,CAAC,WAAW,IAAI,EAAE;AAAA,IACpB;AAAA,EACF;AAAA,EACA;AAAA,IACE;AAAA;AAAA,IACA,CAAC;AAAA,EACH;AAAA,EACA;AAAA,IACE;AAAA;AAAA,IACA,CAAC;AAAA,EACH;AAAA,EACA;AAAA,IACE;AAAA;AAAA,IACA,CAAC,CAAC,0BAA0B,GAAG,EAAE,CAAC;AAAA,EACpC;AACF;AAEA,SAAS,aAAa,MAAM;AAC1B,WAAS,qBAAqB,MAAM;AAClC,UAAM,YAAY,IAAI,kBAAkB;AAExC,OAAG,uCAAuC,MAAM;AAC9C,aAAO,UAAU,SAAS,IAAI,EAAE,MAAM,CAAC,GAAG,MAAM,gBAAgB,CAAC,MAAM,CAAC,CAAC,EAAE,WAAW;AAAA,IACxF,CAAC;AAED,OAAG,8CAA8C,YAAY;AAC3D,YAAM,UAAU,CAAC,GAAG,G
AAG,CAAC;AACxB,UAAI,OAAO;AACX,YAAM,SAAS,CAAC;AAChB,YAAM,cAAc,MAAM,KAAK,KAAK,KAAK,SAAS,QAAQ,OAAO,CAAC,KAAK,QAAQ,MAAM,KAAK,CAAC,CAAC,CAAC,EAC1F,KAAK,OAAO,EACZ,KAAK,EACL,OAAO,QAAQ,EAAE;AAEpB,iBAAW,QAAQ,aAAa;AAC9B,YAAI,CAAC,KAAM;AACX,eAAO,KAAK,KAAK,MAAM,QAAW,IAAI,CAAC;AACvC,eAAO,KAAK,MAAM,IAAI;AAAA,MACxB;AACA,YAAM,SAAS,UAAU,OAAO;AAChC,iBAAW,SAAS,QAAQ;AAC1B,eAAO,SAAS,KAAK;AAAA,MACvB;AACA,aAAO,SAAS;AAChB,aAAO,MAAM;AAEb,iBAAW,KAAK,iBAAiB;AAC/B,cAAM,OAAO,KAAK,EAAE,KAAK,CAAC,UAAU;AAClC,cAAI,MAAM,OAAO;AACf,mBAAO,MAAM,MAAM,KAAK,EAAE,cAAc,CAAC;AAAA,UAC3C;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF,CAAC;AAAA,EACH,CAAC;AACD,WAAS,iBAAiB,MAAM;AAC9B,UAAM,YAAY,IAAI,cAAc;AAEpC,OAAG,mCAAmC,MAAM;AAC1C,aAAO,UAAU,SAAS,UAAU,EAAE,MAAM,CAAC,GAAG,MAAM,eAAe,CAAC,MAAM,CAAC,CAAC,EAAE,WAAW;AAAA,IAC7F,CAAC;AAED,OAAG,0CAA0C,YAAY;AACvD,YAAM,UAAU,CAAC,GAAG,GAAG,CAAC;AACxB,UAAI,OAAO;AACX,YAAM,SAAS,CAAC;AAChB,YAAM,cAAc,MAAM,KAAK,KAAK,KAAK,SAAS,QAAQ,OAAO,CAAC,KAAK,QAAQ,MAAM,KAAK,CAAC,CAAC,CAAC,EAC1F,KAAK,OAAO,EACZ,KAAK,EACL,OAAO,QAAQ,EAAE;AAEpB,iBAAW,QAAQ,aAAa;AAC9B,YAAI,CAAC,KAAM;AACX,eAAO,KAAK,KAAK,MAAM,QAAW,IAAI,CAAC;AACvC,eAAO,KAAK,MAAM,IAAI;AAAA,MACxB;AACA,YAAM,SAAS,UAAU,OAAO;AAChC,iBAAW,SAAS,QAAQ;AAC1B,eAAO,SAAS,KAAK;AAAA,MACvB;AACA,aAAO,SAAS;AAChB,aAAO,MAAM;AAEb,iBAAW,KAAK,gBAAgB;AAC9B,cAAM,OAAO,KAAK,EAAE,KAAK,CAAC,UAAU;AAClC,cAAI,MAAM,OAAO;AACf,mBAAO,MAAM,MAAM,KAAK,EAAE,cAAc,CAAC;AAAA,UAC3C;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF,CAAC;AAED,aAAS,wBAAwB,MAAM;AACrC,YAAM,iBAAiB,IAAI,cAAc,KAAK;AAE9C,SAAG,mCAAmC,MAAM;AAC1C;AAAA,UACE,eAAe,SAAS,gBAAgB,EAAE,MAAM,CAAC,GAAG,MAAM,qBAAqB,CAAC,MAAM,CAAC;AAAA,QACzF,EAAE,WAAW;AAAA,MACf,CAAC;AAED,SAAG,0CAA0C,YAAY;AACvD,cAAM,UAAU,CAAC,GAAG,GAAG,CAAC;AACxB,YAAI,OAAO;AACX,cAAM,SAAS,CAAC;AAChB,cAAM,cAAc;AAAA,UAClB,KAAK,KAAK,KAAK,SAAS,QAAQ,OAAO,CAAC,KAAK,QAAQ,MAAM,KAAK,CAAC,CAAC;AAAA,QACpE,EACG,KAAK,OAAO,EACZ,KAAK,EACL,OAAO,QAAQ,EAAE;AAEpB,mBAAW,QAAQ,aAAa;AAC9B,cAAI,CAAC,KAAM;AACX,iBAAO,KAAK,KAAK,MAAM,QAAW,IAAI,CAAC;AACvC,iBAAO,KAAK,MAAM,IAAI;AAAA,QACxB;AACA,cAAM,SAAS,eAAe,OAAO;AA
CrC,mBAAW,SAAS,QAAQ;AAC1B,iBAAO,SAAS,KAAK;AAAA,QACvB;AACA,eAAO,SAAS;AAChB,eAAO,MAAM;AAEb,mBAAW,KAAK,sBAAsB;AACpC,gBAAM,OAAO,KAAK,EAAE,KAAK,CAAC,UAAU;AAClC,gBAAI,MAAM,OAAO;AACf,qBAAO,MAAM,MAAM,KAAK,EAAE,cAAc,CAAC;AAAA,YAC3C;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF,CAAC;AAAA,IACH,CAAC;AAAA,EACH,CAAC;AACD,WAAS,iBAAiB,MAAM;AAC9B,OAAG,8BAA8B,MAAM;AACrC,sBAAgB,QAAQ,CAAC,GAAG,MAAM;AAChC,eAAO,cAAc,CAAC,CAAC,EAAE,cAAc,oBAAoB,CAAC,CAAC;AAAA,MAC/D,CAAC;AAAA,IACH,CAAC;AAAA,EACH,CAAC;AACD,WAAS,mBAAmB,MAAM;AAChC,OAAG,wCAAwC,MAAM;AAC/C,2BAAqB,QAAQ,CAAC,CAAC,GAAG,CAAC,MAAM;AACvC,eAAO,gBAAgB,CAAC,CAAC,EAAE,cAAc,CAAC;AAAA,MAC5C,CAAC;AAAA,IACH,CAAC;AAAA,EACH,CAAC;AACH,CAAC;","names":[]}
@@ -22,14 +22,16 @@ __export(stream_adapter_exports, {
22
22
  StreamAdapterWrapper: () => StreamAdapterWrapper
23
23
  });
24
24
  module.exports = __toCommonJS(stream_adapter_exports);
25
+ var import_types = require("../types.cjs");
25
26
  var import_utils = require("../utils.cjs");
27
+ var import_io = require("../voice/io.cjs");
26
28
  var import_tts = require("./tts.cjs");
27
29
  class StreamAdapter extends import_tts.TTS {
28
30
  #tts;
29
31
  #sentenceTokenizer;
30
32
  label;
31
33
  constructor(tts, sentenceTokenizer) {
32
- super(tts.sampleRate, tts.numChannels, { streaming: true });
34
+ super(tts.sampleRate, tts.numChannels, { streaming: true, alignedTranscript: true });
33
35
  this.#tts = tts;
34
36
  this.#sentenceTokenizer = sentenceTokenizer;
35
37
  this.label = this.#tts.label;
@@ -59,6 +61,7 @@ class StreamAdapterWrapper extends import_tts.SynthesizeStream {
59
61
  this.label = `tts.StreamAdapterWrapper<${this.#tts.label}>`;
60
62
  }
61
63
  async run() {
64
+ let cumulativeDuration = 0;
62
65
  const forwardInput = async () => {
63
66
  for await (const input of this.input) {
64
67
  if (this.abortController.signal.aborted) break;
@@ -89,8 +92,19 @@ class StreamAdapterWrapper extends import_tts.SynthesizeStream {
89
92
  const audioStream = this.#tts.synthesize(token, this.connOptions, this.abortSignal);
90
93
  await (prevTask == null ? void 0 : prevTask.result);
91
94
  if (controller.signal.aborted) return;
95
+ const timedString = (0, import_io.createTimedString)({
96
+ text: token,
97
+ startTime: cumulativeDuration
98
+ });
99
+ let isFirstFrame = true;
92
100
  for await (const audio of audioStream) {
93
101
  if (controller.signal.aborted) break;
102
+ if (isFirstFrame) {
103
+ audio.frame.userdata[import_types.USERDATA_TIMED_TRANSCRIPT] = [timedString];
104
+ isFirstFrame = false;
105
+ }
106
+ const frameDuration = audio.frame.samplesPerChannel / audio.frame.sampleRate;
107
+ cumulativeDuration += frameDuration;
94
108
  this.queue.put(audio);
95
109
  }
96
110
  };
@@ -1 +1 @@
1
- {"version":3,"sources":["../../src/tts/stream_adapter.ts"],"sourcesContent":["// SPDX-FileCopyrightText: 2024 LiveKit, Inc.\n//\n// SPDX-License-Identifier: Apache-2.0\nimport type { SentenceStream, SentenceTokenizer } from '../tokenize/index.js';\nimport type { APIConnectOptions } from '../types.js';\nimport { Task } from '../utils.js';\nimport type { ChunkedStream } from './tts.js';\nimport { SynthesizeStream, TTS } from './tts.js';\n\nexport class StreamAdapter extends TTS {\n #tts: TTS;\n #sentenceTokenizer: SentenceTokenizer;\n label: string;\n\n constructor(tts: TTS, sentenceTokenizer: SentenceTokenizer) {\n super(tts.sampleRate, tts.numChannels, { streaming: true });\n this.#tts = tts;\n this.#sentenceTokenizer = sentenceTokenizer;\n this.label = this.#tts.label;\n this.label = `tts.StreamAdapter<${this.#tts.label}>`;\n\n this.#tts.on('metrics_collected', (metrics) => {\n this.emit('metrics_collected', metrics);\n });\n this.#tts.on('error', (error) => {\n this.emit('error', error);\n });\n }\n\n synthesize(\n text: string,\n connOptions?: APIConnectOptions,\n abortSignal?: AbortSignal,\n ): ChunkedStream {\n return this.#tts.synthesize(text, connOptions, abortSignal);\n }\n\n stream(options?: { connOptions?: APIConnectOptions }): StreamAdapterWrapper {\n return new StreamAdapterWrapper(this.#tts, this.#sentenceTokenizer, options?.connOptions);\n }\n}\n\nexport class StreamAdapterWrapper extends SynthesizeStream {\n #tts: TTS;\n #sentenceStream: SentenceStream;\n label: string;\n\n constructor(tts: TTS, sentenceTokenizer: SentenceTokenizer, connOptions?: APIConnectOptions) {\n super(tts, connOptions);\n this.#tts = tts;\n this.#sentenceStream = sentenceTokenizer.stream();\n this.label = `tts.StreamAdapterWrapper<${this.#tts.label}>`;\n }\n\n protected async run() {\n const forwardInput = async () => {\n for await (const input of this.input) {\n if (this.abortController.signal.aborted) break;\n\n if (input === SynthesizeStream.FLUSH_SENTINEL) {\n 
this.#sentenceStream.flush();\n } else {\n this.#sentenceStream.pushText(input);\n }\n }\n this.#sentenceStream.endInput();\n this.#sentenceStream.close();\n };\n\n const synthesizeSentenceStream = async () => {\n let task: Task<void> | undefined;\n const tokenCompletionTasks: Task<void>[] = [];\n\n for await (const ev of this.#sentenceStream) {\n if (this.abortController.signal.aborted) break;\n\n // this will enable non-blocking synthesis of the stream of tokens\n task = Task.from(\n (controller) => synthesize(ev.token, task, controller),\n this.abortController,\n );\n\n tokenCompletionTasks.push(task);\n }\n\n await Promise.all(tokenCompletionTasks.map((t) => t.result));\n this.queue.put(SynthesizeStream.END_OF_STREAM);\n };\n\n const synthesize = async (\n token: string,\n prevTask: Task<void> | undefined,\n controller: AbortController,\n ) => {\n const audioStream = this.#tts.synthesize(token, this.connOptions, this.abortSignal);\n\n // wait for previous audio transcription to complete before starting\n // to queuing audio frames of the current token\n await prevTask?.result;\n if (controller.signal.aborted) return;\n\n for await (const audio of audioStream) {\n if (controller.signal.aborted) break;\n this.queue.put(audio);\n }\n };\n\n await Promise.all([forwardInput(), synthesizeSentenceStream()]);\n 
}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAKA,mBAAqB;AAErB,iBAAsC;AAE/B,MAAM,sBAAsB,eAAI;AAAA,EACrC;AAAA,EACA;AAAA,EACA;AAAA,EAEA,YAAY,KAAU,mBAAsC;AAC1D,UAAM,IAAI,YAAY,IAAI,aAAa,EAAE,WAAW,KAAK,CAAC;AAC1D,SAAK,OAAO;AACZ,SAAK,qBAAqB;AAC1B,SAAK,QAAQ,KAAK,KAAK;AACvB,SAAK,QAAQ,qBAAqB,KAAK,KAAK,KAAK;AAEjD,SAAK,KAAK,GAAG,qBAAqB,CAAC,YAAY;AAC7C,WAAK,KAAK,qBAAqB,OAAO;AAAA,IACxC,CAAC;AACD,SAAK,KAAK,GAAG,SAAS,CAAC,UAAU;AAC/B,WAAK,KAAK,SAAS,KAAK;AAAA,IAC1B,CAAC;AAAA,EACH;AAAA,EAEA,WACE,MACA,aACA,aACe;AACf,WAAO,KAAK,KAAK,WAAW,MAAM,aAAa,WAAW;AAAA,EAC5D;AAAA,EAEA,OAAO,SAAqE;AAC1E,WAAO,IAAI,qBAAqB,KAAK,MAAM,KAAK,oBAAoB,mCAAS,WAAW;AAAA,EAC1F;AACF;AAEO,MAAM,6BAA6B,4BAAiB;AAAA,EACzD;AAAA,EACA;AAAA,EACA;AAAA,EAEA,YAAY,KAAU,mBAAsC,aAAiC;AAC3F,UAAM,KAAK,WAAW;AACtB,SAAK,OAAO;AACZ,SAAK,kBAAkB,kBAAkB,OAAO;AAChD,SAAK,QAAQ,4BAA4B,KAAK,KAAK,KAAK;AAAA,EAC1D;AAAA,EAEA,MAAgB,MAAM;AACpB,UAAM,eAAe,YAAY;AAC/B,uBAAiB,SAAS,KAAK,OAAO;AACpC,YAAI,KAAK,gBAAgB,OAAO,QAAS;AAEzC,YAAI,UAAU,4BAAiB,gBAAgB;AAC7C,eAAK,gBAAgB,MAAM;AAAA,QAC7B,OAAO;AACL,eAAK,gBAAgB,SAAS,KAAK;AAAA,QACrC;AAAA,MACF;AACA,WAAK,gBAAgB,SAAS;AAC9B,WAAK,gBAAgB,MAAM;AAAA,IAC7B;AAEA,UAAM,2BAA2B,YAAY;AAC3C,UAAI;AACJ,YAAM,uBAAqC,CAAC;AAE5C,uBAAiB,MAAM,KAAK,iBAAiB;AAC3C,YAAI,KAAK,gBAAgB,OAAO,QAAS;AAGzC,eAAO,kBAAK;AAAA,UACV,CAAC,eAAe,WAAW,GAAG,OAAO,MAAM,UAAU;AAAA,UACrD,KAAK;AAAA,QACP;AAEA,6BAAqB,KAAK,IAAI;AAAA,MAChC;AAEA,YAAM,QAAQ,IAAI,qBAAqB,IAAI,CAAC,MAAM,EAAE,MAAM,CAAC;AAC3D,WAAK,MAAM,IAAI,4BAAiB,aAAa;AAAA,IAC/C;AAEA,UAAM,aAAa,OACjB,OACA,UACA,eACG;AACH,YAAM,cAAc,KAAK,KAAK,WAAW,OAAO,KAAK,aAAa,KAAK,WAAW;AAIlF,aAAM,qCAAU;AAChB,UAAI,WAAW,OAAO,QAAS;AAE/B,uBAAiB,SAAS,aAAa;AACrC,YAAI,WAAW,OAAO,QAAS;AAC/B,aAAK,MAAM,IAAI,KAAK;AAAA,MACtB;AAAA,IACF;AAEA,UAAM,QAAQ,IAAI,CAAC,aAAa,GAAG,yBAAyB,CAAC,CAAC;AAAA,EAChE;AACF;","names":[]}
1
+ {"version":3,"sources":["../../src/tts/stream_adapter.ts"],"sourcesContent":["// SPDX-FileCopyrightText: 2024 LiveKit, Inc.\n//\n// SPDX-License-Identifier: Apache-2.0\nimport type { SentenceStream, SentenceTokenizer } from '../tokenize/index.js';\nimport type { APIConnectOptions } from '../types.js';\nimport { USERDATA_TIMED_TRANSCRIPT } from '../types.js';\nimport { Task } from '../utils.js';\nimport { createTimedString } from '../voice/io.js';\nimport type { ChunkedStream } from './tts.js';\nimport { SynthesizeStream, TTS } from './tts.js';\n\nexport class StreamAdapter extends TTS {\n #tts: TTS;\n #sentenceTokenizer: SentenceTokenizer;\n label: string;\n\n constructor(tts: TTS, sentenceTokenizer: SentenceTokenizer) {\n super(tts.sampleRate, tts.numChannels, { streaming: true, alignedTranscript: true });\n this.#tts = tts;\n this.#sentenceTokenizer = sentenceTokenizer;\n this.label = this.#tts.label;\n this.label = `tts.StreamAdapter<${this.#tts.label}>`;\n\n this.#tts.on('metrics_collected', (metrics) => {\n this.emit('metrics_collected', metrics);\n });\n this.#tts.on('error', (error) => {\n this.emit('error', error);\n });\n }\n\n synthesize(\n text: string,\n connOptions?: APIConnectOptions,\n abortSignal?: AbortSignal,\n ): ChunkedStream {\n return this.#tts.synthesize(text, connOptions, abortSignal);\n }\n\n stream(options?: { connOptions?: APIConnectOptions }): StreamAdapterWrapper {\n return new StreamAdapterWrapper(this.#tts, this.#sentenceTokenizer, options?.connOptions);\n }\n}\n\nexport class StreamAdapterWrapper extends SynthesizeStream {\n #tts: TTS;\n #sentenceStream: SentenceStream;\n label: string;\n\n constructor(tts: TTS, sentenceTokenizer: SentenceTokenizer, connOptions?: APIConnectOptions) {\n super(tts, connOptions);\n this.#tts = tts;\n this.#sentenceStream = sentenceTokenizer.stream();\n this.label = `tts.StreamAdapterWrapper<${this.#tts.label}>`;\n }\n\n protected async run() {\n let cumulativeDuration = 0;\n\n const forwardInput = 
async () => {\n for await (const input of this.input) {\n if (this.abortController.signal.aborted) break;\n\n if (input === SynthesizeStream.FLUSH_SENTINEL) {\n this.#sentenceStream.flush();\n } else {\n this.#sentenceStream.pushText(input);\n }\n }\n this.#sentenceStream.endInput();\n this.#sentenceStream.close();\n };\n\n const synthesizeSentenceStream = async () => {\n let task: Task<void> | undefined;\n const tokenCompletionTasks: Task<void>[] = [];\n\n for await (const ev of this.#sentenceStream) {\n if (this.abortController.signal.aborted) break;\n\n // this will enable non-blocking synthesis of the stream of tokens\n task = Task.from(\n (controller) => synthesize(ev.token, task, controller),\n this.abortController,\n );\n\n tokenCompletionTasks.push(task);\n }\n\n await Promise.all(tokenCompletionTasks.map((t) => t.result));\n this.queue.put(SynthesizeStream.END_OF_STREAM);\n };\n\n const synthesize = async (\n token: string,\n prevTask: Task<void> | undefined,\n controller: AbortController,\n ) => {\n const audioStream = this.#tts.synthesize(token, this.connOptions, this.abortSignal);\n\n // wait for previous audio transcription to complete before starting\n // to queuing audio frames of the current token\n await prevTask?.result;\n if (controller.signal.aborted) return;\n\n // Create a TimedString with the sentence text and current cumulative duration\n const timedString = createTimedString({\n text: token,\n startTime: cumulativeDuration,\n });\n\n let isFirstFrame = true;\n for await (const audio of audioStream) {\n if (controller.signal.aborted) break;\n\n // Attach the TimedString to the first frame of this sentence\n if (isFirstFrame) {\n audio.frame.userdata[USERDATA_TIMED_TRANSCRIPT] = [timedString];\n isFirstFrame = false;\n }\n\n // Track cumulative duration\n const frameDuration = audio.frame.samplesPerChannel / audio.frame.sampleRate;\n cumulativeDuration += frameDuration;\n\n this.queue.put(audio);\n }\n };\n\n await 
Promise.all([forwardInput(), synthesizeSentenceStream()]);\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAKA,mBAA0C;AAC1C,mBAAqB;AACrB,gBAAkC;AAElC,iBAAsC;AAE/B,MAAM,sBAAsB,eAAI;AAAA,EACrC;AAAA,EACA;AAAA,EACA;AAAA,EAEA,YAAY,KAAU,mBAAsC;AAC1D,UAAM,IAAI,YAAY,IAAI,aAAa,EAAE,WAAW,MAAM,mBAAmB,KAAK,CAAC;AACnF,SAAK,OAAO;AACZ,SAAK,qBAAqB;AAC1B,SAAK,QAAQ,KAAK,KAAK;AACvB,SAAK,QAAQ,qBAAqB,KAAK,KAAK,KAAK;AAEjD,SAAK,KAAK,GAAG,qBAAqB,CAAC,YAAY;AAC7C,WAAK,KAAK,qBAAqB,OAAO;AAAA,IACxC,CAAC;AACD,SAAK,KAAK,GAAG,SAAS,CAAC,UAAU;AAC/B,WAAK,KAAK,SAAS,KAAK;AAAA,IAC1B,CAAC;AAAA,EACH;AAAA,EAEA,WACE,MACA,aACA,aACe;AACf,WAAO,KAAK,KAAK,WAAW,MAAM,aAAa,WAAW;AAAA,EAC5D;AAAA,EAEA,OAAO,SAAqE;AAC1E,WAAO,IAAI,qBAAqB,KAAK,MAAM,KAAK,oBAAoB,mCAAS,WAAW;AAAA,EAC1F;AACF;AAEO,MAAM,6BAA6B,4BAAiB;AAAA,EACzD;AAAA,EACA;AAAA,EACA;AAAA,EAEA,YAAY,KAAU,mBAAsC,aAAiC;AAC3F,UAAM,KAAK,WAAW;AACtB,SAAK,OAAO;AACZ,SAAK,kBAAkB,kBAAkB,OAAO;AAChD,SAAK,QAAQ,4BAA4B,KAAK,KAAK,KAAK;AAAA,EAC1D;AAAA,EAEA,MAAgB,MAAM;AACpB,QAAI,qBAAqB;AAEzB,UAAM,eAAe,YAAY;AAC/B,uBAAiB,SAAS,KAAK,OAAO;AACpC,YAAI,KAAK,gBAAgB,OAAO,QAAS;AAEzC,YAAI,UAAU,4BAAiB,gBAAgB;AAC7C,eAAK,gBAAgB,MAAM;AAAA,QAC7B,OAAO;AACL,eAAK,gBAAgB,SAAS,KAAK;AAAA,QACrC;AAAA,MACF;AACA,WAAK,gBAAgB,SAAS;AAC9B,WAAK,gBAAgB,MAAM;AAAA,IAC7B;AAEA,UAAM,2BAA2B,YAAY;AAC3C,UAAI;AACJ,YAAM,uBAAqC,CAAC;AAE5C,uBAAiB,MAAM,KAAK,iBAAiB;AAC3C,YAAI,KAAK,gBAAgB,OAAO,QAAS;AAGzC,eAAO,kBAAK;AAAA,UACV,CAAC,eAAe,WAAW,GAAG,OAAO,MAAM,UAAU;AAAA,UACrD,KAAK;AAAA,QACP;AAEA,6BAAqB,KAAK,IAAI;AAAA,MAChC;AAEA,YAAM,QAAQ,IAAI,qBAAqB,IAAI,CAAC,MAAM,EAAE,MAAM,CAAC;AAC3D,WAAK,MAAM,IAAI,4BAAiB,aAAa;AAAA,IAC/C;AAEA,UAAM,aAAa,OACjB,OACA,UACA,eACG;AACH,YAAM,cAAc,KAAK,KAAK,WAAW,OAAO,KAAK,aAAa,KAAK,WAAW;AAIlF,aAAM,qCAAU;AAChB,UAAI,WAAW,OAAO,QAAS;AAG/B,YAAM,kBAAc,6BAAkB;AAAA,QACpC,MAAM;AAAA,QACN,WAAW;AAAA,MACb,CAAC;AAED,UAAI,eAAe;AACnB,uBAAiB,SAAS,aAAa;AACrC,YAAI,WAAW,OAAO,QAAS;AAG/B,YAAI,cAAc;AAChB,gBAAM,MAAM,SAAS,sCAAyB,IAAI,CAAC,WAAW;AAC9D,yBAAe;AAAA,QACjB;AAGA,cAAM,gBAAgB,MAAM,MAAM,oBAAoB,MAAM,MAAM;AAC
lE,8BAAsB;AAEtB,aAAK,MAAM,IAAI,KAAK;AAAA,MACtB;AAAA,IACF;AAEA,UAAM,QAAQ,IAAI,CAAC,aAAa,GAAG,yBAAyB,CAAC,CAAC;AAAA,EAChE;AACF;","names":[]}
@@ -1 +1 @@
1
- {"version":3,"file":"stream_adapter.d.ts","sourceRoot":"","sources":["../../src/tts/stream_adapter.ts"],"names":[],"mappings":"AAGA,OAAO,KAAK,EAAkB,iBAAiB,EAAE,MAAM,sBAAsB,CAAC;AAC9E,OAAO,KAAK,EAAE,iBAAiB,EAAE,MAAM,aAAa,CAAC;AAErD,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,UAAU,CAAC;AAC9C,OAAO,EAAE,gBAAgB,EAAE,GAAG,EAAE,MAAM,UAAU,CAAC;AAEjD,qBAAa,aAAc,SAAQ,GAAG;;IAGpC,KAAK,EAAE,MAAM,CAAC;gBAEF,GAAG,EAAE,GAAG,EAAE,iBAAiB,EAAE,iBAAiB;IAe1D,UAAU,CACR,IAAI,EAAE,MAAM,EACZ,WAAW,CAAC,EAAE,iBAAiB,EAC/B,WAAW,CAAC,EAAE,WAAW,GACxB,aAAa;IAIhB,MAAM,CAAC,OAAO,CAAC,EAAE;QAAE,WAAW,CAAC,EAAE,iBAAiB,CAAA;KAAE,GAAG,oBAAoB;CAG5E;AAED,qBAAa,oBAAqB,SAAQ,gBAAgB;;IAGxD,KAAK,EAAE,MAAM,CAAC;gBAEF,GAAG,EAAE,GAAG,EAAE,iBAAiB,EAAE,iBAAiB,EAAE,WAAW,CAAC,EAAE,iBAAiB;cAO3E,GAAG;CAuDpB"}
1
+ {"version":3,"file":"stream_adapter.d.ts","sourceRoot":"","sources":["../../src/tts/stream_adapter.ts"],"names":[],"mappings":"AAGA,OAAO,KAAK,EAAkB,iBAAiB,EAAE,MAAM,sBAAsB,CAAC;AAC9E,OAAO,KAAK,EAAE,iBAAiB,EAAE,MAAM,aAAa,CAAC;AAIrD,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,UAAU,CAAC;AAC9C,OAAO,EAAE,gBAAgB,EAAE,GAAG,EAAE,MAAM,UAAU,CAAC;AAEjD,qBAAa,aAAc,SAAQ,GAAG;;IAGpC,KAAK,EAAE,MAAM,CAAC;gBAEF,GAAG,EAAE,GAAG,EAAE,iBAAiB,EAAE,iBAAiB;IAe1D,UAAU,CACR,IAAI,EAAE,MAAM,EACZ,WAAW,CAAC,EAAE,iBAAiB,EAC/B,WAAW,CAAC,EAAE,WAAW,GACxB,aAAa;IAIhB,MAAM,CAAC,OAAO,CAAC,EAAE;QAAE,WAAW,CAAC,EAAE,iBAAiB,CAAA;KAAE,GAAG,oBAAoB;CAG5E;AAED,qBAAa,oBAAqB,SAAQ,gBAAgB;;IAGxD,KAAK,EAAE,MAAM,CAAC;gBAEF,GAAG,EAAE,GAAG,EAAE,iBAAiB,EAAE,iBAAiB,EAAE,WAAW,CAAC,EAAE,iBAAiB;cAO3E,GAAG;CA2EpB"}
@@ -1,11 +1,13 @@
1
+ import { USERDATA_TIMED_TRANSCRIPT } from "../types.js";
1
2
  import { Task } from "../utils.js";
3
+ import { createTimedString } from "../voice/io.js";
2
4
  import { SynthesizeStream, TTS } from "./tts.js";
3
5
  class StreamAdapter extends TTS {
4
6
  #tts;
5
7
  #sentenceTokenizer;
6
8
  label;
7
9
  constructor(tts, sentenceTokenizer) {
8
- super(tts.sampleRate, tts.numChannels, { streaming: true });
10
+ super(tts.sampleRate, tts.numChannels, { streaming: true, alignedTranscript: true });
9
11
  this.#tts = tts;
10
12
  this.#sentenceTokenizer = sentenceTokenizer;
11
13
  this.label = this.#tts.label;
@@ -35,6 +37,7 @@ class StreamAdapterWrapper extends SynthesizeStream {
35
37
  this.label = `tts.StreamAdapterWrapper<${this.#tts.label}>`;
36
38
  }
37
39
  async run() {
40
+ let cumulativeDuration = 0;
38
41
  const forwardInput = async () => {
39
42
  for await (const input of this.input) {
40
43
  if (this.abortController.signal.aborted) break;
@@ -65,8 +68,19 @@ class StreamAdapterWrapper extends SynthesizeStream {
65
68
  const audioStream = this.#tts.synthesize(token, this.connOptions, this.abortSignal);
66
69
  await (prevTask == null ? void 0 : prevTask.result);
67
70
  if (controller.signal.aborted) return;
71
+ const timedString = createTimedString({
72
+ text: token,
73
+ startTime: cumulativeDuration
74
+ });
75
+ let isFirstFrame = true;
68
76
  for await (const audio of audioStream) {
69
77
  if (controller.signal.aborted) break;
78
+ if (isFirstFrame) {
79
+ audio.frame.userdata[USERDATA_TIMED_TRANSCRIPT] = [timedString];
80
+ isFirstFrame = false;
81
+ }
82
+ const frameDuration = audio.frame.samplesPerChannel / audio.frame.sampleRate;
83
+ cumulativeDuration += frameDuration;
70
84
  this.queue.put(audio);
71
85
  }
72
86
  };
@@ -1 +1 @@
1
- {"version":3,"sources":["../../src/tts/stream_adapter.ts"],"sourcesContent":["// SPDX-FileCopyrightText: 2024 LiveKit, Inc.\n//\n// SPDX-License-Identifier: Apache-2.0\nimport type { SentenceStream, SentenceTokenizer } from '../tokenize/index.js';\nimport type { APIConnectOptions } from '../types.js';\nimport { Task } from '../utils.js';\nimport type { ChunkedStream } from './tts.js';\nimport { SynthesizeStream, TTS } from './tts.js';\n\nexport class StreamAdapter extends TTS {\n #tts: TTS;\n #sentenceTokenizer: SentenceTokenizer;\n label: string;\n\n constructor(tts: TTS, sentenceTokenizer: SentenceTokenizer) {\n super(tts.sampleRate, tts.numChannels, { streaming: true });\n this.#tts = tts;\n this.#sentenceTokenizer = sentenceTokenizer;\n this.label = this.#tts.label;\n this.label = `tts.StreamAdapter<${this.#tts.label}>`;\n\n this.#tts.on('metrics_collected', (metrics) => {\n this.emit('metrics_collected', metrics);\n });\n this.#tts.on('error', (error) => {\n this.emit('error', error);\n });\n }\n\n synthesize(\n text: string,\n connOptions?: APIConnectOptions,\n abortSignal?: AbortSignal,\n ): ChunkedStream {\n return this.#tts.synthesize(text, connOptions, abortSignal);\n }\n\n stream(options?: { connOptions?: APIConnectOptions }): StreamAdapterWrapper {\n return new StreamAdapterWrapper(this.#tts, this.#sentenceTokenizer, options?.connOptions);\n }\n}\n\nexport class StreamAdapterWrapper extends SynthesizeStream {\n #tts: TTS;\n #sentenceStream: SentenceStream;\n label: string;\n\n constructor(tts: TTS, sentenceTokenizer: SentenceTokenizer, connOptions?: APIConnectOptions) {\n super(tts, connOptions);\n this.#tts = tts;\n this.#sentenceStream = sentenceTokenizer.stream();\n this.label = `tts.StreamAdapterWrapper<${this.#tts.label}>`;\n }\n\n protected async run() {\n const forwardInput = async () => {\n for await (const input of this.input) {\n if (this.abortController.signal.aborted) break;\n\n if (input === SynthesizeStream.FLUSH_SENTINEL) {\n 
this.#sentenceStream.flush();\n } else {\n this.#sentenceStream.pushText(input);\n }\n }\n this.#sentenceStream.endInput();\n this.#sentenceStream.close();\n };\n\n const synthesizeSentenceStream = async () => {\n let task: Task<void> | undefined;\n const tokenCompletionTasks: Task<void>[] = [];\n\n for await (const ev of this.#sentenceStream) {\n if (this.abortController.signal.aborted) break;\n\n // this will enable non-blocking synthesis of the stream of tokens\n task = Task.from(\n (controller) => synthesize(ev.token, task, controller),\n this.abortController,\n );\n\n tokenCompletionTasks.push(task);\n }\n\n await Promise.all(tokenCompletionTasks.map((t) => t.result));\n this.queue.put(SynthesizeStream.END_OF_STREAM);\n };\n\n const synthesize = async (\n token: string,\n prevTask: Task<void> | undefined,\n controller: AbortController,\n ) => {\n const audioStream = this.#tts.synthesize(token, this.connOptions, this.abortSignal);\n\n // wait for previous audio transcription to complete before starting\n // to queuing audio frames of the current token\n await prevTask?.result;\n if (controller.signal.aborted) return;\n\n for await (const audio of audioStream) {\n if (controller.signal.aborted) break;\n this.queue.put(audio);\n }\n };\n\n await Promise.all([forwardInput(), synthesizeSentenceStream()]);\n 
}\n}\n"],"mappings":"AAKA,SAAS,YAAY;AAErB,SAAS,kBAAkB,WAAW;AAE/B,MAAM,sBAAsB,IAAI;AAAA,EACrC;AAAA,EACA;AAAA,EACA;AAAA,EAEA,YAAY,KAAU,mBAAsC;AAC1D,UAAM,IAAI,YAAY,IAAI,aAAa,EAAE,WAAW,KAAK,CAAC;AAC1D,SAAK,OAAO;AACZ,SAAK,qBAAqB;AAC1B,SAAK,QAAQ,KAAK,KAAK;AACvB,SAAK,QAAQ,qBAAqB,KAAK,KAAK,KAAK;AAEjD,SAAK,KAAK,GAAG,qBAAqB,CAAC,YAAY;AAC7C,WAAK,KAAK,qBAAqB,OAAO;AAAA,IACxC,CAAC;AACD,SAAK,KAAK,GAAG,SAAS,CAAC,UAAU;AAC/B,WAAK,KAAK,SAAS,KAAK;AAAA,IAC1B,CAAC;AAAA,EACH;AAAA,EAEA,WACE,MACA,aACA,aACe;AACf,WAAO,KAAK,KAAK,WAAW,MAAM,aAAa,WAAW;AAAA,EAC5D;AAAA,EAEA,OAAO,SAAqE;AAC1E,WAAO,IAAI,qBAAqB,KAAK,MAAM,KAAK,oBAAoB,mCAAS,WAAW;AAAA,EAC1F;AACF;AAEO,MAAM,6BAA6B,iBAAiB;AAAA,EACzD;AAAA,EACA;AAAA,EACA;AAAA,EAEA,YAAY,KAAU,mBAAsC,aAAiC;AAC3F,UAAM,KAAK,WAAW;AACtB,SAAK,OAAO;AACZ,SAAK,kBAAkB,kBAAkB,OAAO;AAChD,SAAK,QAAQ,4BAA4B,KAAK,KAAK,KAAK;AAAA,EAC1D;AAAA,EAEA,MAAgB,MAAM;AACpB,UAAM,eAAe,YAAY;AAC/B,uBAAiB,SAAS,KAAK,OAAO;AACpC,YAAI,KAAK,gBAAgB,OAAO,QAAS;AAEzC,YAAI,UAAU,iBAAiB,gBAAgB;AAC7C,eAAK,gBAAgB,MAAM;AAAA,QAC7B,OAAO;AACL,eAAK,gBAAgB,SAAS,KAAK;AAAA,QACrC;AAAA,MACF;AACA,WAAK,gBAAgB,SAAS;AAC9B,WAAK,gBAAgB,MAAM;AAAA,IAC7B;AAEA,UAAM,2BAA2B,YAAY;AAC3C,UAAI;AACJ,YAAM,uBAAqC,CAAC;AAE5C,uBAAiB,MAAM,KAAK,iBAAiB;AAC3C,YAAI,KAAK,gBAAgB,OAAO,QAAS;AAGzC,eAAO,KAAK;AAAA,UACV,CAAC,eAAe,WAAW,GAAG,OAAO,MAAM,UAAU;AAAA,UACrD,KAAK;AAAA,QACP;AAEA,6BAAqB,KAAK,IAAI;AAAA,MAChC;AAEA,YAAM,QAAQ,IAAI,qBAAqB,IAAI,CAAC,MAAM,EAAE,MAAM,CAAC;AAC3D,WAAK,MAAM,IAAI,iBAAiB,aAAa;AAAA,IAC/C;AAEA,UAAM,aAAa,OACjB,OACA,UACA,eACG;AACH,YAAM,cAAc,KAAK,KAAK,WAAW,OAAO,KAAK,aAAa,KAAK,WAAW;AAIlF,aAAM,qCAAU;AAChB,UAAI,WAAW,OAAO,QAAS;AAE/B,uBAAiB,SAAS,aAAa;AACrC,YAAI,WAAW,OAAO,QAAS;AAC/B,aAAK,MAAM,IAAI,KAAK;AAAA,MACtB;AAAA,IACF;AAEA,UAAM,QAAQ,IAAI,CAAC,aAAa,GAAG,yBAAyB,CAAC,CAAC;AAAA,EAChE;AACF;","names":[]}
1
+ {"version":3,"sources":["../../src/tts/stream_adapter.ts"],"sourcesContent":["// SPDX-FileCopyrightText: 2024 LiveKit, Inc.\n//\n// SPDX-License-Identifier: Apache-2.0\nimport type { SentenceStream, SentenceTokenizer } from '../tokenize/index.js';\nimport type { APIConnectOptions } from '../types.js';\nimport { USERDATA_TIMED_TRANSCRIPT } from '../types.js';\nimport { Task } from '../utils.js';\nimport { createTimedString } from '../voice/io.js';\nimport type { ChunkedStream } from './tts.js';\nimport { SynthesizeStream, TTS } from './tts.js';\n\nexport class StreamAdapter extends TTS {\n #tts: TTS;\n #sentenceTokenizer: SentenceTokenizer;\n label: string;\n\n constructor(tts: TTS, sentenceTokenizer: SentenceTokenizer) {\n super(tts.sampleRate, tts.numChannels, { streaming: true, alignedTranscript: true });\n this.#tts = tts;\n this.#sentenceTokenizer = sentenceTokenizer;\n this.label = this.#tts.label;\n this.label = `tts.StreamAdapter<${this.#tts.label}>`;\n\n this.#tts.on('metrics_collected', (metrics) => {\n this.emit('metrics_collected', metrics);\n });\n this.#tts.on('error', (error) => {\n this.emit('error', error);\n });\n }\n\n synthesize(\n text: string,\n connOptions?: APIConnectOptions,\n abortSignal?: AbortSignal,\n ): ChunkedStream {\n return this.#tts.synthesize(text, connOptions, abortSignal);\n }\n\n stream(options?: { connOptions?: APIConnectOptions }): StreamAdapterWrapper {\n return new StreamAdapterWrapper(this.#tts, this.#sentenceTokenizer, options?.connOptions);\n }\n}\n\nexport class StreamAdapterWrapper extends SynthesizeStream {\n #tts: TTS;\n #sentenceStream: SentenceStream;\n label: string;\n\n constructor(tts: TTS, sentenceTokenizer: SentenceTokenizer, connOptions?: APIConnectOptions) {\n super(tts, connOptions);\n this.#tts = tts;\n this.#sentenceStream = sentenceTokenizer.stream();\n this.label = `tts.StreamAdapterWrapper<${this.#tts.label}>`;\n }\n\n protected async run() {\n let cumulativeDuration = 0;\n\n const forwardInput = 
async () => {\n for await (const input of this.input) {\n if (this.abortController.signal.aborted) break;\n\n if (input === SynthesizeStream.FLUSH_SENTINEL) {\n this.#sentenceStream.flush();\n } else {\n this.#sentenceStream.pushText(input);\n }\n }\n this.#sentenceStream.endInput();\n this.#sentenceStream.close();\n };\n\n const synthesizeSentenceStream = async () => {\n let task: Task<void> | undefined;\n const tokenCompletionTasks: Task<void>[] = [];\n\n for await (const ev of this.#sentenceStream) {\n if (this.abortController.signal.aborted) break;\n\n // this will enable non-blocking synthesis of the stream of tokens\n task = Task.from(\n (controller) => synthesize(ev.token, task, controller),\n this.abortController,\n );\n\n tokenCompletionTasks.push(task);\n }\n\n await Promise.all(tokenCompletionTasks.map((t) => t.result));\n this.queue.put(SynthesizeStream.END_OF_STREAM);\n };\n\n const synthesize = async (\n token: string,\n prevTask: Task<void> | undefined,\n controller: AbortController,\n ) => {\n const audioStream = this.#tts.synthesize(token, this.connOptions, this.abortSignal);\n\n // wait for previous audio transcription to complete before starting\n // to queuing audio frames of the current token\n await prevTask?.result;\n if (controller.signal.aborted) return;\n\n // Create a TimedString with the sentence text and current cumulative duration\n const timedString = createTimedString({\n text: token,\n startTime: cumulativeDuration,\n });\n\n let isFirstFrame = true;\n for await (const audio of audioStream) {\n if (controller.signal.aborted) break;\n\n // Attach the TimedString to the first frame of this sentence\n if (isFirstFrame) {\n audio.frame.userdata[USERDATA_TIMED_TRANSCRIPT] = [timedString];\n isFirstFrame = false;\n }\n\n // Track cumulative duration\n const frameDuration = audio.frame.samplesPerChannel / audio.frame.sampleRate;\n cumulativeDuration += frameDuration;\n\n this.queue.put(audio);\n }\n };\n\n await 
Promise.all([forwardInput(), synthesizeSentenceStream()]);\n }\n}\n"],"mappings":"AAKA,SAAS,iCAAiC;AAC1C,SAAS,YAAY;AACrB,SAAS,yBAAyB;AAElC,SAAS,kBAAkB,WAAW;AAE/B,MAAM,sBAAsB,IAAI;AAAA,EACrC;AAAA,EACA;AAAA,EACA;AAAA,EAEA,YAAY,KAAU,mBAAsC;AAC1D,UAAM,IAAI,YAAY,IAAI,aAAa,EAAE,WAAW,MAAM,mBAAmB,KAAK,CAAC;AACnF,SAAK,OAAO;AACZ,SAAK,qBAAqB;AAC1B,SAAK,QAAQ,KAAK,KAAK;AACvB,SAAK,QAAQ,qBAAqB,KAAK,KAAK,KAAK;AAEjD,SAAK,KAAK,GAAG,qBAAqB,CAAC,YAAY;AAC7C,WAAK,KAAK,qBAAqB,OAAO;AAAA,IACxC,CAAC;AACD,SAAK,KAAK,GAAG,SAAS,CAAC,UAAU;AAC/B,WAAK,KAAK,SAAS,KAAK;AAAA,IAC1B,CAAC;AAAA,EACH;AAAA,EAEA,WACE,MACA,aACA,aACe;AACf,WAAO,KAAK,KAAK,WAAW,MAAM,aAAa,WAAW;AAAA,EAC5D;AAAA,EAEA,OAAO,SAAqE;AAC1E,WAAO,IAAI,qBAAqB,KAAK,MAAM,KAAK,oBAAoB,mCAAS,WAAW;AAAA,EAC1F;AACF;AAEO,MAAM,6BAA6B,iBAAiB;AAAA,EACzD;AAAA,EACA;AAAA,EACA;AAAA,EAEA,YAAY,KAAU,mBAAsC,aAAiC;AAC3F,UAAM,KAAK,WAAW;AACtB,SAAK,OAAO;AACZ,SAAK,kBAAkB,kBAAkB,OAAO;AAChD,SAAK,QAAQ,4BAA4B,KAAK,KAAK,KAAK;AAAA,EAC1D;AAAA,EAEA,MAAgB,MAAM;AACpB,QAAI,qBAAqB;AAEzB,UAAM,eAAe,YAAY;AAC/B,uBAAiB,SAAS,KAAK,OAAO;AACpC,YAAI,KAAK,gBAAgB,OAAO,QAAS;AAEzC,YAAI,UAAU,iBAAiB,gBAAgB;AAC7C,eAAK,gBAAgB,MAAM;AAAA,QAC7B,OAAO;AACL,eAAK,gBAAgB,SAAS,KAAK;AAAA,QACrC;AAAA,MACF;AACA,WAAK,gBAAgB,SAAS;AAC9B,WAAK,gBAAgB,MAAM;AAAA,IAC7B;AAEA,UAAM,2BAA2B,YAAY;AAC3C,UAAI;AACJ,YAAM,uBAAqC,CAAC;AAE5C,uBAAiB,MAAM,KAAK,iBAAiB;AAC3C,YAAI,KAAK,gBAAgB,OAAO,QAAS;AAGzC,eAAO,KAAK;AAAA,UACV,CAAC,eAAe,WAAW,GAAG,OAAO,MAAM,UAAU;AAAA,UACrD,KAAK;AAAA,QACP;AAEA,6BAAqB,KAAK,IAAI;AAAA,MAChC;AAEA,YAAM,QAAQ,IAAI,qBAAqB,IAAI,CAAC,MAAM,EAAE,MAAM,CAAC;AAC3D,WAAK,MAAM,IAAI,iBAAiB,aAAa;AAAA,IAC/C;AAEA,UAAM,aAAa,OACjB,OACA,UACA,eACG;AACH,YAAM,cAAc,KAAK,KAAK,WAAW,OAAO,KAAK,aAAa,KAAK,WAAW;AAIlF,aAAM,qCAAU;AAChB,UAAI,WAAW,OAAO,QAAS;AAG/B,YAAM,cAAc,kBAAkB;AAAA,QACpC,MAAM;AAAA,QACN,WAAW;AAAA,MACb,CAAC;AAED,UAAI,eAAe;AACnB,uBAAiB,SAAS,aAAa;AACrC,YAAI,WAAW,OAAO,QAAS;AAG/B,YAAI,cAAc;AAChB,gBAAM,MAAM,SAAS,yBAAyB,IAAI,CAAC,WAAW;AAC9D,yBAAe;AAAA,QACjB;AAGA,cAAM,gBAAgB,MAAM,MAAM,oBAAoB,MAAM,MAAM;AAClE,8BAAsB;AAEtB,aAAK,MAAM,I
AAI,KAAK;AAAA,MACtB;AAAA,IACF;AAEA,UAAM,QAAQ,IAAI,CAAC,aAAa,GAAG,yBAAyB,CAAC,CAAC;AAAA,EAChE;AACF;","names":[]}
@@ -1 +1 @@
1
- {"version":3,"sources":["../../src/tts/tts.ts"],"sourcesContent":["// SPDX-FileCopyrightText: 2024 LiveKit, Inc.\n//\n// SPDX-License-Identifier: Apache-2.0\nimport type { AudioFrame } from '@livekit/rtc-node';\nimport type { TypedEventEmitter as TypedEmitter } from '@livekit/typed-emitter';\nimport type { Span } from '@opentelemetry/api';\nimport { EventEmitter } from 'node:events';\nimport type { ReadableStream } from 'node:stream/web';\nimport { APIConnectionError, APIError } from '../_exceptions.js';\nimport { log } from '../log.js';\nimport type { TTSMetrics } from '../metrics/base.js';\nimport { DeferredReadableStream } from '../stream/deferred_stream.js';\nimport { recordException, traceTypes, tracer } from '../telemetry/index.js';\nimport { type APIConnectOptions, DEFAULT_API_CONNECT_OPTIONS, intervalForRetry } from '../types.js';\nimport { AsyncIterableQueue, delay, mergeFrames, startSoon, toError } from '../utils.js';\n\n/** SynthesizedAudio is a packet of speech synthesis as returned by the TTS. 
*/\nexport interface SynthesizedAudio {\n /** Request ID (one segment could be made up of multiple requests) */\n requestId: string;\n /** Segment ID, each segment is separated by a flush */\n segmentId: string;\n /** Synthesized audio frame */\n frame: AudioFrame;\n /** Current segment of the synthesized audio */\n deltaText?: string;\n /** Whether this is the last frame of the segment (streaming only) */\n final: boolean;\n}\n\n/**\n * Describes the capabilities of the TTS provider.\n *\n * @remarks\n * At present, only `streaming` is supplied to this interface, and the framework only supports\n * providers that do have a streaming endpoint.\n */\nexport interface TTSCapabilities {\n streaming: boolean;\n}\n\nexport interface TTSError {\n type: 'tts_error';\n timestamp: number;\n label: string;\n error: Error;\n recoverable: boolean;\n}\n\nexport type TTSCallbacks = {\n ['metrics_collected']: (metrics: TTSMetrics) => void;\n ['error']: (error: TTSError) => void;\n};\n\n/**\n * An instance of a text-to-speech adapter.\n *\n * @remarks\n * This class is abstract, and as such cannot be used directly. 
Instead, use a provider plugin that\n * exports its own child TTS class, which inherits this class's methods.\n */\nexport abstract class TTS extends (EventEmitter as new () => TypedEmitter<TTSCallbacks>) {\n #capabilities: TTSCapabilities;\n #sampleRate: number;\n #numChannels: number;\n abstract label: string;\n\n constructor(sampleRate: number, numChannels: number, capabilities: TTSCapabilities) {\n super();\n this.#capabilities = capabilities;\n this.#sampleRate = sampleRate;\n this.#numChannels = numChannels;\n }\n\n /** Returns this TTS's capabilities */\n get capabilities(): TTSCapabilities {\n return this.#capabilities;\n }\n\n /** Returns the sample rate of audio frames returned by this TTS */\n get sampleRate(): number {\n return this.#sampleRate;\n }\n\n /** Returns the channel count of audio frames returned by this TTS */\n get numChannels(): number {\n return this.#numChannels;\n }\n\n /**\n * Receives text and returns synthesis in the form of a {@link ChunkedStream}\n */\n abstract synthesize(\n text: string,\n connOptions?: APIConnectOptions,\n abortSignal?: AbortSignal,\n ): ChunkedStream;\n\n /**\n * Returns a {@link SynthesizeStream} that can be used to push text and receive audio data\n *\n * @param options - Optional configuration including connection options\n */\n abstract stream(options?: { connOptions?: APIConnectOptions }): SynthesizeStream;\n\n async close(): Promise<void> {\n return;\n }\n}\n\n/**\n * An instance of a text-to-speech stream, as an asynchronous iterable iterator.\n *\n * @example Looping through frames\n * ```ts\n * for await (const event of stream) {\n * await source.captureFrame(event.frame);\n * }\n * ```\n *\n * @remarks\n * This class is abstract, and as such cannot be used directly. 
Instead, use a provider plugin that\n * exports its own child SynthesizeStream class, which inherits this class's methods.\n */\nexport abstract class SynthesizeStream\n implements AsyncIterableIterator<SynthesizedAudio | typeof SynthesizeStream.END_OF_STREAM>\n{\n protected static readonly FLUSH_SENTINEL = Symbol('FLUSH_SENTINEL');\n static readonly END_OF_STREAM = Symbol('END_OF_STREAM');\n protected input = new AsyncIterableQueue<string | typeof SynthesizeStream.FLUSH_SENTINEL>();\n protected queue = new AsyncIterableQueue<\n SynthesizedAudio | typeof SynthesizeStream.END_OF_STREAM\n >();\n protected output = new AsyncIterableQueue<\n SynthesizedAudio | typeof SynthesizeStream.END_OF_STREAM\n >();\n protected closed = false;\n protected connOptions: APIConnectOptions;\n protected abortController = new AbortController();\n\n private deferredInputStream: DeferredReadableStream<\n string | typeof SynthesizeStream.FLUSH_SENTINEL\n >;\n private logger = log();\n\n abstract label: string;\n\n #tts: TTS;\n #metricsPendingTexts: string[] = [];\n #metricsText = '';\n #monitorMetricsTask?: Promise<void>;\n #ttsRequestSpan?: Span;\n\n constructor(tts: TTS, connOptions: APIConnectOptions = DEFAULT_API_CONNECT_OPTIONS) {\n this.#tts = tts;\n this.connOptions = connOptions;\n this.deferredInputStream = new DeferredReadableStream();\n this.pumpInput();\n\n this.abortController.signal.addEventListener('abort', () => {\n this.deferredInputStream.detachSource();\n // TODO (AJS-36) clean this up when we refactor with streams\n if (!this.input.closed) this.input.close();\n if (!this.output.closed) this.output.close();\n this.closed = true;\n });\n\n // this is a hack to immitate asyncio.create_task so that mainTask\n // is run **after** the constructor has finished. 
Otherwise we get\n // runtime error when trying to access class variables in the\n // `run` method.\n startSoon(() => this.mainTask().finally(() => this.queue.close()));\n }\n\n private _mainTaskImpl = async (span: Span) => {\n this.#ttsRequestSpan = span;\n span.setAttributes({\n [traceTypes.ATTR_TTS_STREAMING]: true,\n [traceTypes.ATTR_TTS_LABEL]: this.#tts.label,\n });\n\n for (let i = 0; i < this.connOptions.maxRetry + 1; i++) {\n try {\n return await tracer.startActiveSpan(\n async (attemptSpan) => {\n attemptSpan.setAttribute(traceTypes.ATTR_RETRY_COUNT, i);\n try {\n return await this.run();\n } catch (error) {\n recordException(attemptSpan, toError(error));\n throw error;\n }\n },\n { name: 'tts_request_run' },\n );\n } catch (error) {\n if (error instanceof APIError) {\n const retryInterval = intervalForRetry(this.connOptions, i);\n\n if (this.connOptions.maxRetry === 0 || !error.retryable) {\n this.emitError({ error, recoverable: false });\n throw error;\n } else if (i === this.connOptions.maxRetry) {\n this.emitError({ error, recoverable: false });\n throw new APIConnectionError({\n message: `failed to generate TTS completion after ${this.connOptions.maxRetry + 1} attempts`,\n options: { retryable: false },\n });\n } else {\n // Don't emit error event for recoverable errors during retry loop\n // to avoid ERR_UNHANDLED_ERROR or premature session termination\n this.logger.warn(\n { tts: this.#tts.label, attempt: i + 1, error },\n `failed to synthesize speech, retrying in ${retryInterval}s`,\n );\n }\n\n if (retryInterval > 0) {\n await delay(retryInterval);\n }\n } else {\n this.emitError({ error: toError(error), recoverable: false });\n throw error;\n }\n }\n }\n };\n\n private mainTask = async () =>\n tracer.startActiveSpan(async (span) => this._mainTaskImpl(span), {\n name: 'tts_request',\n endOnExit: false,\n });\n\n private emitError({ error, recoverable }: { error: Error; recoverable: boolean }) {\n this.#tts.emit('error', {\n type: 'tts_error',\n 
timestamp: Date.now(),\n label: this.#tts.label,\n error,\n recoverable,\n });\n }\n\n // NOTE(AJS-37): The implementation below uses an AsyncIterableQueue (`this.input`)\n // bridged from a DeferredReadableStream (`this.deferredInputStream`) rather than\n // consuming the stream directly.\n //\n // A full refactor to native Web Streams was considered but is currently deferred.\n // The primary reason is to maintain architectural parity with the Python SDK,\n // which is a key design goal for the project. This ensures a consistent developer\n // experience across both platforms.\n //\n // For more context, see the discussion in GitHub issue # 844.\n protected async pumpInput() {\n const reader = this.deferredInputStream.stream.getReader();\n try {\n while (true) {\n const { done, value } = await reader.read();\n if (done || value === SynthesizeStream.FLUSH_SENTINEL) {\n break;\n }\n this.pushText(value);\n }\n this.endInput();\n } catch (error) {\n this.logger.error(error, 'Error reading deferred input stream');\n } finally {\n reader.releaseLock();\n // Ensure output is closed when the stream ends\n if (!this.#monitorMetricsTask) {\n // No text was received, close the output directly\n this.output.close();\n }\n }\n }\n\n protected async monitorMetrics() {\n const startTime = process.hrtime.bigint();\n let audioDurationMs = 0;\n let ttfb: bigint = BigInt(-1);\n let requestId = '';\n\n const emit = () => {\n if (this.#metricsPendingTexts.length) {\n const text = this.#metricsPendingTexts.shift()!;\n const duration = process.hrtime.bigint() - startTime;\n const roundedAudioDurationMs = Math.round(audioDurationMs);\n const metrics: TTSMetrics = {\n type: 'tts_metrics',\n timestamp: Date.now(),\n requestId,\n ttfbMs: ttfb === BigInt(-1) ? 
-1 : Math.trunc(Number(ttfb / BigInt(1000000))),\n durationMs: Math.trunc(Number(duration / BigInt(1000000))),\n charactersCount: text.length,\n audioDurationMs: roundedAudioDurationMs,\n cancelled: this.abortController.signal.aborted,\n label: this.#tts.label,\n streamed: false,\n };\n if (this.#ttsRequestSpan) {\n this.#ttsRequestSpan.setAttribute(traceTypes.ATTR_TTS_METRICS, JSON.stringify(metrics));\n }\n this.#tts.emit('metrics_collected', metrics);\n }\n };\n\n for await (const audio of this.queue) {\n if (this.abortController.signal.aborted) {\n break;\n }\n this.output.put(audio);\n if (audio === SynthesizeStream.END_OF_STREAM) continue;\n requestId = audio.requestId;\n if (ttfb === BigInt(-1)) {\n ttfb = process.hrtime.bigint() - startTime;\n }\n // TODO(AJS-102): use frame.durationMs once available in rtc-node\n audioDurationMs += (audio.frame.samplesPerChannel / audio.frame.sampleRate) * 1000;\n if (audio.final) {\n emit();\n }\n }\n\n if (requestId) {\n emit();\n }\n\n if (this.#ttsRequestSpan) {\n this.#ttsRequestSpan.end();\n this.#ttsRequestSpan = undefined;\n }\n }\n\n protected abstract run(): Promise<void>;\n\n updateInputStream(text: ReadableStream<string>) {\n this.deferredInputStream.setSource(text);\n }\n\n /** Push a string of text to the TTS */\n /** @deprecated Use `updateInputStream` instead */\n pushText(text: string) {\n if (!this.#monitorMetricsTask) {\n this.#monitorMetricsTask = this.monitorMetrics();\n // Close output when metrics task completes\n this.#monitorMetricsTask.finally(() => this.output.close());\n }\n this.#metricsText += text;\n\n if (this.input.closed || this.closed) {\n // Stream was aborted/closed, silently skip\n return;\n }\n\n this.input.put(text);\n }\n\n /** Flush the TTS, causing it to process all pending text */\n flush() {\n if (this.#metricsText) {\n this.#metricsPendingTexts.push(this.#metricsText);\n this.#metricsText = '';\n }\n\n if (this.input.closed || this.closed) {\n // Stream was aborted/closed, 
silently skip\n return;\n }\n\n this.input.put(SynthesizeStream.FLUSH_SENTINEL);\n }\n\n /** Mark the input as ended and forbid additional pushes */\n endInput() {\n this.flush();\n\n if (this.input.closed || this.closed) {\n // Stream was aborted/closed, silently skip\n return;\n }\n\n this.input.close();\n }\n\n next(): Promise<IteratorResult<SynthesizedAudio | typeof SynthesizeStream.END_OF_STREAM>> {\n return this.output.next();\n }\n\n get abortSignal(): AbortSignal {\n return this.abortController.signal;\n }\n\n /** Close both the input and output of the TTS stream */\n close() {\n this.abortController.abort();\n }\n\n [Symbol.asyncIterator](): SynthesizeStream {\n return this;\n }\n}\n\n/**\n * An instance of a text-to-speech response, as an asynchronous iterable iterator.\n *\n * @example Looping through frames\n * ```ts\n * for await (const event of stream) {\n * await source.captureFrame(event.frame);\n * }\n * ```\n *\n * @remarks\n * This class is abstract, and as such cannot be used directly. 
Instead, use a provider plugin that\n * exports its own child ChunkedStream class, which inherits this class's methods.\n */\nexport abstract class ChunkedStream implements AsyncIterableIterator<SynthesizedAudio> {\n protected queue = new AsyncIterableQueue<SynthesizedAudio>();\n protected output = new AsyncIterableQueue<SynthesizedAudio>();\n protected closed = false;\n abstract label: string;\n #text: string;\n #tts: TTS;\n #ttsRequestSpan?: Span;\n private _connOptions: APIConnectOptions;\n private logger = log();\n\n protected abortController = new AbortController();\n\n constructor(\n text: string,\n tts: TTS,\n connOptions: APIConnectOptions = DEFAULT_API_CONNECT_OPTIONS,\n abortSignal?: AbortSignal,\n ) {\n this.#text = text;\n this.#tts = tts;\n this._connOptions = connOptions;\n\n if (abortSignal) {\n abortSignal.addEventListener('abort', () => this.abortController.abort(), { once: true });\n }\n\n this.monitorMetrics();\n\n // this is a hack to immitate asyncio.create_task so that mainTask\n // is run **after** the constructor has finished. 
Otherwise we get\n // runtime error when trying to access class variables in the\n // `run` method.\n Promise.resolve().then(() => this.mainTask().finally(() => this.queue.close()));\n }\n\n private _mainTaskImpl = async (span: Span) => {\n this.#ttsRequestSpan = span;\n span.setAttributes({\n [traceTypes.ATTR_TTS_STREAMING]: false,\n [traceTypes.ATTR_TTS_LABEL]: this.#tts.label,\n });\n\n for (let i = 0; i < this._connOptions.maxRetry + 1; i++) {\n try {\n return await tracer.startActiveSpan(\n async (attemptSpan) => {\n attemptSpan.setAttribute(traceTypes.ATTR_RETRY_COUNT, i);\n try {\n return await this.run();\n } catch (error) {\n recordException(attemptSpan, toError(error));\n throw error;\n }\n },\n { name: 'tts_request_run' },\n );\n } catch (error) {\n if (error instanceof APIError) {\n const retryInterval = intervalForRetry(this._connOptions, i);\n\n if (this._connOptions.maxRetry === 0 || !error.retryable) {\n this.emitError({ error, recoverable: false });\n throw error;\n } else if (i === this._connOptions.maxRetry) {\n this.emitError({ error, recoverable: false });\n throw new APIConnectionError({\n message: `failed to generate TTS completion after ${this._connOptions.maxRetry + 1} attempts`,\n options: { retryable: false },\n });\n } else {\n // Don't emit error event for recoverable errors during retry loop\n // to avoid ERR_UNHANDLED_ERROR or premature session termination\n this.logger.warn(\n { tts: this.#tts.label, attempt: i + 1, error },\n `failed to generate TTS completion, retrying in ${retryInterval}s`,\n );\n }\n\n if (retryInterval > 0) {\n await delay(retryInterval);\n }\n } else {\n this.emitError({ error: toError(error), recoverable: false });\n throw error;\n }\n }\n }\n };\n\n private async mainTask() {\n return tracer.startActiveSpan(async (span) => this._mainTaskImpl(span), {\n name: 'tts_request',\n endOnExit: false,\n });\n }\n\n private emitError({ error, recoverable }: { error: Error; recoverable: boolean }) {\n 
this.#tts.emit('error', {\n type: 'tts_error',\n timestamp: Date.now(),\n label: this.#tts.label,\n error,\n recoverable,\n });\n }\n\n protected abstract run(): Promise<void>;\n\n get inputText(): string {\n return this.#text;\n }\n\n get abortSignal(): AbortSignal {\n return this.abortController.signal;\n }\n\n protected async monitorMetrics() {\n const startTime = process.hrtime.bigint();\n let audioDurationMs = 0;\n let ttfb: bigint = BigInt(-1);\n let requestId = '';\n\n for await (const audio of this.queue) {\n this.output.put(audio);\n requestId = audio.requestId;\n if (ttfb === BigInt(-1)) {\n ttfb = process.hrtime.bigint() - startTime;\n }\n audioDurationMs += (audio.frame.samplesPerChannel / audio.frame.sampleRate) * 1000;\n }\n this.output.close();\n\n const duration = process.hrtime.bigint() - startTime;\n const metrics: TTSMetrics = {\n type: 'tts_metrics',\n timestamp: Date.now(),\n requestId,\n ttfbMs: ttfb === BigInt(-1) ? -1 : Math.trunc(Number(ttfb / BigInt(1000000))),\n durationMs: Math.trunc(Number(duration / BigInt(1000000))),\n charactersCount: this.#text.length,\n audioDurationMs: Math.round(audioDurationMs),\n cancelled: false, // TODO(AJS-186): support ChunkedStream with 1.0 - add this.abortController.signal.aborted here\n label: this.#tts.label,\n streamed: false,\n };\n\n if (this.#ttsRequestSpan) {\n this.#ttsRequestSpan.setAttribute(traceTypes.ATTR_TTS_METRICS, JSON.stringify(metrics));\n this.#ttsRequestSpan.end();\n this.#ttsRequestSpan = undefined;\n }\n\n this.#tts.emit('metrics_collected', metrics);\n }\n\n /** Collect every frame into one in a single call */\n async collect(): Promise<AudioFrame> {\n const frames = [];\n for await (const event of this) {\n frames.push(event.frame);\n }\n return mergeFrames(frames);\n }\n\n next(): Promise<IteratorResult<SynthesizedAudio>> {\n return this.output.next();\n }\n\n /** Close both the input and output of the TTS stream */\n close() {\n if (!this.queue.closed) this.queue.close();\n if 
(!this.output.closed) this.output.close();\n if (!this.abortController.signal.aborted) this.abortController.abort();\n this.closed = true;\n }\n\n [Symbol.asyncIterator](): ChunkedStream {\n return this;\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAMA,yBAA6B;AAE7B,wBAA6C;AAC7C,iBAAoB;AAEpB,6BAAuC;AACvC,uBAAoD;AACpD,mBAAsF;AACtF,mBAA2E;AA+CpE,MAAe,YAAa,gCAAsD;AAAA,EACvF;AAAA,EACA;AAAA,EACA;AAAA,EAGA,YAAY,YAAoB,aAAqB,cAA+B;AAClF,UAAM;AACN,SAAK,gBAAgB;AACrB,SAAK,cAAc;AACnB,SAAK,eAAe;AAAA,EACtB;AAAA;AAAA,EAGA,IAAI,eAAgC;AAClC,WAAO,KAAK;AAAA,EACd;AAAA;AAAA,EAGA,IAAI,aAAqB;AACvB,WAAO,KAAK;AAAA,EACd;AAAA;AAAA,EAGA,IAAI,cAAsB;AACxB,WAAO,KAAK;AAAA,EACd;AAAA,EAkBA,MAAM,QAAuB;AAC3B;AAAA,EACF;AACF;AAgBO,MAAe,iBAEtB;AAAA,EACE,OAA0B,iBAAiB,OAAO,gBAAgB;AAAA,EAClE,OAAgB,gBAAgB,OAAO,eAAe;AAAA,EAC5C,QAAQ,IAAI,gCAAoE;AAAA,EAChF,QAAQ,IAAI,gCAEpB;AAAA,EACQ,SAAS,IAAI,gCAErB;AAAA,EACQ,SAAS;AAAA,EACT;AAAA,EACA,kBAAkB,IAAI,gBAAgB;AAAA,EAExC;AAAA,EAGA,aAAS,gBAAI;AAAA,EAIrB;AAAA,EACA,uBAAiC,CAAC;AAAA,EAClC,eAAe;AAAA,EACf;AAAA,EACA;AAAA,EAEA,YAAY,KAAU,cAAiC,0CAA6B;AAClF,SAAK,OAAO;AACZ,SAAK,cAAc;AACnB,SAAK,sBAAsB,IAAI,8CAAuB;AACtD,SAAK,UAAU;AAEf,SAAK,gBAAgB,OAAO,iBAAiB,SAAS,MAAM;AAC1D,WAAK,oBAAoB,aAAa;AAEtC,UAAI,CAAC,KAAK,MAAM,OAAQ,MAAK,MAAM,MAAM;AACzC,UAAI,CAAC,KAAK,OAAO,OAAQ,MAAK,OAAO,MAAM;AAC3C,WAAK,SAAS;AAAA,IAChB,CAAC;AAMD,gCAAU,MAAM,KAAK,SAAS,EAAE,QAAQ,MAAM,KAAK,MAAM,MAAM,CAAC,CAAC;AAAA,EACnE;AAAA,EAEQ,gBAAgB,OAAO,SAAe;AAC5C,SAAK,kBAAkB;AACvB,SAAK,cAAc;AAAA,MACjB,CAAC,4BAAW,kBAAkB,GAAG;AAAA,MACjC,CAAC,4BAAW,cAAc,GAAG,KAAK,KAAK;AAAA,IACzC,CAAC;AAED,aAAS,IAAI,GAAG,IAAI,KAAK,YAAY,WAAW,GAAG,KAAK;AACtD,UAAI;AACF,eAAO,MAAM,wBAAO;AAAA,UAClB,OAAO,gBAAgB;AACrB,wBAAY,aAAa,4BAAW,kBAAkB,CAAC;AACvD,gBAAI;AACF,qBAAO,MAAM,KAAK,IAAI;AAAA,YACxB,SAAS,OAAO;AACd,oDAAgB,iBAAa,sBAAQ,KAAK,CAAC;AAC3C,oBAAM;AAAA,YACR;AAAA,UACF;AAAA,UACA,EAAE,MAAM,kBAAkB;AAAA,QAC5B;AAAA,MACF,SAAS,OAAO;AACd,YAAI,iBAAiB,4BAAU;AAC7B,gBAAM,oBAAgB,+BAAiB,KAAK,aAAa,CAAC;AAE1D,cAAI,KAAK,YAAY,aAAa,KAAK,CAA
C,MAAM,WAAW;AACvD,iBAAK,UAAU,EAAE,OAAO,aAAa,MAAM,CAAC;AAC5C,kBAAM;AAAA,UACR,WAAW,MAAM,KAAK,YAAY,UAAU;AAC1C,iBAAK,UAAU,EAAE,OAAO,aAAa,MAAM,CAAC;AAC5C,kBAAM,IAAI,qCAAmB;AAAA,cAC3B,SAAS,2CAA2C,KAAK,YAAY,WAAW,CAAC;AAAA,cACjF,SAAS,EAAE,WAAW,MAAM;AAAA,YAC9B,CAAC;AAAA,UACH,OAAO;AAGL,iBAAK,OAAO;AAAA,cACV,EAAE,KAAK,KAAK,KAAK,OAAO,SAAS,IAAI,GAAG,MAAM;AAAA,cAC9C,6CAA6C,aAAa;AAAA,YAC5D;AAAA,UACF;AAEA,cAAI,gBAAgB,GAAG;AACrB,sBAAM,oBAAM,aAAa;AAAA,UAC3B;AAAA,QACF,OAAO;AACL,eAAK,UAAU,EAAE,WAAO,sBAAQ,KAAK,GAAG,aAAa,MAAM,CAAC;AAC5D,gBAAM;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA,EAEQ,WAAW,YACjB,wBAAO,gBAAgB,OAAO,SAAS,KAAK,cAAc,IAAI,GAAG;AAAA,IAC/D,MAAM;AAAA,IACN,WAAW;AAAA,EACb,CAAC;AAAA,EAEK,UAAU,EAAE,OAAO,YAAY,GAA2C;AAChF,SAAK,KAAK,KAAK,SAAS;AAAA,MACtB,MAAM;AAAA,MACN,WAAW,KAAK,IAAI;AAAA,MACpB,OAAO,KAAK,KAAK;AAAA,MACjB;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,MAAgB,YAAY;AAC1B,UAAM,SAAS,KAAK,oBAAoB,OAAO,UAAU;AACzD,QAAI;AACF,aAAO,MAAM;AACX,cAAM,EAAE,MAAM,MAAM,IAAI,MAAM,OAAO,KAAK;AAC1C,YAAI,QAAQ,UAAU,iBAAiB,gBAAgB;AACrD;AAAA,QACF;AACA,aAAK,SAAS,KAAK;AAAA,MACrB;AACA,WAAK,SAAS;AAAA,IAChB,SAAS,OAAO;AACd,WAAK,OAAO,MAAM,OAAO,qCAAqC;AAAA,IAChE,UAAE;AACA,aAAO,YAAY;AAEnB,UAAI,CAAC,KAAK,qBAAqB;AAE7B,aAAK,OAAO,MAAM;AAAA,MACpB;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAgB,iBAAiB;AAC/B,UAAM,YAAY,QAAQ,OAAO,OAAO;AACxC,QAAI,kBAAkB;AACtB,QAAI,OAAe,OAAO,EAAE;AAC5B,QAAI,YAAY;AAEhB,UAAM,OAAO,MAAM;AACjB,UAAI,KAAK,qBAAqB,QAAQ;AACpC,cAAM,OAAO,KAAK,qBAAqB,MAAM;AAC7C,cAAM,WAAW,QAAQ,OAAO,OAAO,IAAI;AAC3C,cAAM,yBAAyB,KAAK,MAAM,eAAe;AACzD,cAAM,UAAsB;AAAA,UAC1B,MAAM;AAAA,UACN,WAAW,KAAK,IAAI;AAAA,UACpB;AAAA,UACA,QAAQ,SAAS,OAAO,EAAE,IAAI,KAAK,KAAK,MAAM,OAAO,OAAO,OAAO,GAAO,CAAC,CAAC;AAAA,UAC5E,YAAY,KAAK,MAAM,OAAO,WAAW,OAAO,GAAO,CAAC,CAAC;AAAA,UACzD,iBAAiB,KAAK;AAAA,UACtB,iBAAiB;AAAA,UACjB,WAAW,KAAK,gBAAgB,OAAO;AAAA,UACvC,OAAO,KAAK,KAAK;AAAA,UACjB,UAAU;AAAA,QACZ;AACA,YAAI,KAAK,iBAAiB;AACxB,eAAK,gBAAgB,aAAa,4BAAW,kBAAkB,KAAK,UAAU,OAAO,CAAC;AAAA,QACxF;AACA,aAAK,KAAK,KAAK,qBA
AqB,OAAO;AAAA,MAC7C;AAAA,IACF;AAEA,qBAAiB,SAAS,KAAK,OAAO;AACpC,UAAI,KAAK,gBAAgB,OAAO,SAAS;AACvC;AAAA,MACF;AACA,WAAK,OAAO,IAAI,KAAK;AACrB,UAAI,UAAU,iBAAiB,cAAe;AAC9C,kBAAY,MAAM;AAClB,UAAI,SAAS,OAAO,EAAE,GAAG;AACvB,eAAO,QAAQ,OAAO,OAAO,IAAI;AAAA,MACnC;AAEA,yBAAoB,MAAM,MAAM,oBAAoB,MAAM,MAAM,aAAc;AAC9E,UAAI,MAAM,OAAO;AACf,aAAK;AAAA,MACP;AAAA,IACF;AAEA,QAAI,WAAW;AACb,WAAK;AAAA,IACP;AAEA,QAAI,KAAK,iBAAiB;AACxB,WAAK,gBAAgB,IAAI;AACzB,WAAK,kBAAkB;AAAA,IACzB;AAAA,EACF;AAAA,EAIA,kBAAkB,MAA8B;AAC9C,SAAK,oBAAoB,UAAU,IAAI;AAAA,EACzC;AAAA;AAAA;AAAA,EAIA,SAAS,MAAc;AACrB,QAAI,CAAC,KAAK,qBAAqB;AAC7B,WAAK,sBAAsB,KAAK,eAAe;AAE/C,WAAK,oBAAoB,QAAQ,MAAM,KAAK,OAAO,MAAM,CAAC;AAAA,IAC5D;AACA,SAAK,gBAAgB;AAErB,QAAI,KAAK,MAAM,UAAU,KAAK,QAAQ;AAEpC;AAAA,IACF;AAEA,SAAK,MAAM,IAAI,IAAI;AAAA,EACrB;AAAA;AAAA,EAGA,QAAQ;AACN,QAAI,KAAK,cAAc;AACrB,WAAK,qBAAqB,KAAK,KAAK,YAAY;AAChD,WAAK,eAAe;AAAA,IACtB;AAEA,QAAI,KAAK,MAAM,UAAU,KAAK,QAAQ;AAEpC;AAAA,IACF;AAEA,SAAK,MAAM,IAAI,iBAAiB,cAAc;AAAA,EAChD;AAAA;AAAA,EAGA,WAAW;AACT,SAAK,MAAM;AAEX,QAAI,KAAK,MAAM,UAAU,KAAK,QAAQ;AAEpC;AAAA,IACF;AAEA,SAAK,MAAM,MAAM;AAAA,EACnB;AAAA,EAEA,OAA0F;AACxF,WAAO,KAAK,OAAO,KAAK;AAAA,EAC1B;AAAA,EAEA,IAAI,cAA2B;AAC7B,WAAO,KAAK,gBAAgB;AAAA,EAC9B;AAAA;AAAA,EAGA,QAAQ;AACN,SAAK,gBAAgB,MAAM;AAAA,EAC7B;AAAA,EAEA,CAAC,OAAO,aAAa,IAAsB;AACzC,WAAO;AAAA,EACT;AACF;AAgBO,MAAe,cAAiE;AAAA,EAC3E,QAAQ,IAAI,gCAAqC;AAAA,EACjD,SAAS,IAAI,gCAAqC;AAAA,EAClD,SAAS;AAAA,EAEnB;AAAA,EACA;AAAA,EACA;AAAA,EACQ;AAAA,EACA,aAAS,gBAAI;AAAA,EAEX,kBAAkB,IAAI,gBAAgB;AAAA,EAEhD,YACE,MACA,KACA,cAAiC,0CACjC,aACA;AACA,SAAK,QAAQ;AACb,SAAK,OAAO;AACZ,SAAK,eAAe;AAEpB,QAAI,aAAa;AACf,kBAAY,iBAAiB,SAAS,MAAM,KAAK,gBAAgB,MAAM,GAAG,EAAE,MAAM,KAAK,CAAC;AAAA,IAC1F;AAEA,SAAK,eAAe;AAMpB,YAAQ,QAAQ,EAAE,KAAK,MAAM,KAAK,SAAS,EAAE,QAAQ,MAAM,KAAK,MAAM,MAAM,CAAC,CAAC;AAAA,EAChF;AAAA,EAEQ,gBAAgB,OAAO,SAAe;AAC5C,SAAK,kBAAkB;AACvB,SAAK,cAAc;AAAA,MACjB,CAAC,4BAAW,kBAAkB,GAAG;AAAA,MACjC,CAAC,4BAAW,cAAc,GAAG,KAAK,KAAK;AAAA,IACzC,CAAC;AAED,aAAS,IAAI,GAAG,IAAI,KAAK,aAAa,WAAW,GAAG,KAAK;AACvD,UAAI;AACF,eAAO
,MAAM,wBAAO;AAAA,UAClB,OAAO,gBAAgB;AACrB,wBAAY,aAAa,4BAAW,kBAAkB,CAAC;AACvD,gBAAI;AACF,qBAAO,MAAM,KAAK,IAAI;AAAA,YACxB,SAAS,OAAO;AACd,oDAAgB,iBAAa,sBAAQ,KAAK,CAAC;AAC3C,oBAAM;AAAA,YACR;AAAA,UACF;AAAA,UACA,EAAE,MAAM,kBAAkB;AAAA,QAC5B;AAAA,MACF,SAAS,OAAO;AACd,YAAI,iBAAiB,4BAAU;AAC7B,gBAAM,oBAAgB,+BAAiB,KAAK,cAAc,CAAC;AAE3D,cAAI,KAAK,aAAa,aAAa,KAAK,CAAC,MAAM,WAAW;AACxD,iBAAK,UAAU,EAAE,OAAO,aAAa,MAAM,CAAC;AAC5C,kBAAM;AAAA,UACR,WAAW,MAAM,KAAK,aAAa,UAAU;AAC3C,iBAAK,UAAU,EAAE,OAAO,aAAa,MAAM,CAAC;AAC5C,kBAAM,IAAI,qCAAmB;AAAA,cAC3B,SAAS,2CAA2C,KAAK,aAAa,WAAW,CAAC;AAAA,cAClF,SAAS,EAAE,WAAW,MAAM;AAAA,YAC9B,CAAC;AAAA,UACH,OAAO;AAGL,iBAAK,OAAO;AAAA,cACV,EAAE,KAAK,KAAK,KAAK,OAAO,SAAS,IAAI,GAAG,MAAM;AAAA,cAC9C,kDAAkD,aAAa;AAAA,YACjE;AAAA,UACF;AAEA,cAAI,gBAAgB,GAAG;AACrB,sBAAM,oBAAM,aAAa;AAAA,UAC3B;AAAA,QACF,OAAO;AACL,eAAK,UAAU,EAAE,WAAO,sBAAQ,KAAK,GAAG,aAAa,MAAM,CAAC;AAC5D,gBAAM;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAc,WAAW;AACvB,WAAO,wBAAO,gBAAgB,OAAO,SAAS,KAAK,cAAc,IAAI,GAAG;AAAA,MACtE,MAAM;AAAA,MACN,WAAW;AAAA,IACb,CAAC;AAAA,EACH;AAAA,EAEQ,UAAU,EAAE,OAAO,YAAY,GAA2C;AAChF,SAAK,KAAK,KAAK,SAAS;AAAA,MACtB,MAAM;AAAA,MACN,WAAW,KAAK,IAAI;AAAA,MACpB,OAAO,KAAK,KAAK;AAAA,MACjB;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AAAA,EAIA,IAAI,YAAoB;AACtB,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,IAAI,cAA2B;AAC7B,WAAO,KAAK,gBAAgB;AAAA,EAC9B;AAAA,EAEA,MAAgB,iBAAiB;AAC/B,UAAM,YAAY,QAAQ,OAAO,OAAO;AACxC,QAAI,kBAAkB;AACtB,QAAI,OAAe,OAAO,EAAE;AAC5B,QAAI,YAAY;AAEhB,qBAAiB,SAAS,KAAK,OAAO;AACpC,WAAK,OAAO,IAAI,KAAK;AACrB,kBAAY,MAAM;AAClB,UAAI,SAAS,OAAO,EAAE,GAAG;AACvB,eAAO,QAAQ,OAAO,OAAO,IAAI;AAAA,MACnC;AACA,yBAAoB,MAAM,MAAM,oBAAoB,MAAM,MAAM,aAAc;AAAA,IAChF;AACA,SAAK,OAAO,MAAM;AAElB,UAAM,WAAW,QAAQ,OAAO,OAAO,IAAI;AAC3C,UAAM,UAAsB;AAAA,MAC1B,MAAM;AAAA,MACN,WAAW,KAAK,IAAI;AAAA,MACpB;AAAA,MACA,QAAQ,SAAS,OAAO,EAAE,IAAI,KAAK,KAAK,MAAM,OAAO,OAAO,OAAO,GAAO,CAAC,CAAC;AAAA,MAC5E,YAAY,KAAK,MAAM,OAAO,WAAW,OAAO,GAAO,CAAC,CAAC;AAAA,MACzD,iBAAiB,KAAK,MAAM;AAAA,MAC5B,iBAAiB,KAAK,MAAM,eAAe;AAAA,MAC3C,WAAW;AAAA;AAAA,MACX,OAAO,KAAK,
KAAK;AAAA,MACjB,UAAU;AAAA,IACZ;AAEA,QAAI,KAAK,iBAAiB;AACxB,WAAK,gBAAgB,aAAa,4BAAW,kBAAkB,KAAK,UAAU,OAAO,CAAC;AACtF,WAAK,gBAAgB,IAAI;AACzB,WAAK,kBAAkB;AAAA,IACzB;AAEA,SAAK,KAAK,KAAK,qBAAqB,OAAO;AAAA,EAC7C;AAAA;AAAA,EAGA,MAAM,UAA+B;AACnC,UAAM,SAAS,CAAC;AAChB,qBAAiB,SAAS,MAAM;AAC9B,aAAO,KAAK,MAAM,KAAK;AAAA,IACzB;AACA,eAAO,0BAAY,MAAM;AAAA,EAC3B;AAAA,EAEA,OAAkD;AAChD,WAAO,KAAK,OAAO,KAAK;AAAA,EAC1B;AAAA;AAAA,EAGA,QAAQ;AACN,QAAI,CAAC,KAAK,MAAM,OAAQ,MAAK,MAAM,MAAM;AACzC,QAAI,CAAC,KAAK,OAAO,OAAQ,MAAK,OAAO,MAAM;AAC3C,QAAI,CAAC,KAAK,gBAAgB,OAAO,QAAS,MAAK,gBAAgB,MAAM;AACrE,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,CAAC,OAAO,aAAa,IAAmB;AACtC,WAAO;AAAA,EACT;AACF;","names":[]}
1
+ {"version":3,"sources":["../../src/tts/tts.ts"],"sourcesContent":["// SPDX-FileCopyrightText: 2024 LiveKit, Inc.\n//\n// SPDX-License-Identifier: Apache-2.0\nimport type { AudioFrame } from '@livekit/rtc-node';\nimport type { TypedEventEmitter as TypedEmitter } from '@livekit/typed-emitter';\nimport type { Span } from '@opentelemetry/api';\nimport { EventEmitter } from 'node:events';\nimport type { ReadableStream } from 'node:stream/web';\nimport { APIConnectionError, APIError } from '../_exceptions.js';\nimport { log } from '../log.js';\nimport type { TTSMetrics } from '../metrics/base.js';\nimport { DeferredReadableStream } from '../stream/deferred_stream.js';\nimport { recordException, traceTypes, tracer } from '../telemetry/index.js';\nimport { type APIConnectOptions, DEFAULT_API_CONNECT_OPTIONS, intervalForRetry } from '../types.js';\nimport { AsyncIterableQueue, delay, mergeFrames, startSoon, toError } from '../utils.js';\nimport type { TimedString } from '../voice/io.js';\n\n/**\n * SynthesizedAudio is a packet of speech synthesis as returned by the TTS.\n */\nexport interface SynthesizedAudio {\n /** Request ID (one segment could be made up of multiple requests) */\n requestId: string;\n /** Segment ID, each segment is separated by a flush */\n segmentId: string;\n /** Synthesized audio frame */\n frame: AudioFrame;\n /** Current segment of the synthesized audio */\n deltaText?: string;\n /** Whether this is the last frame of the segment (streaming only) */\n final: boolean;\n /**\n * Timed transcripts associated with this audio packet (word-level timestamps).\n */\n timedTranscripts?: TimedString[];\n}\n\n/**\n * Describes the capabilities of the TTS provider.\n *\n * @remarks\n * At present, only `streaming` is supplied to this interface, and the framework only supports\n * providers that do have a streaming endpoint.\n */\nexport interface TTSCapabilities {\n streaming: boolean;\n // Whether this TTS supports aligned transcripts (word-level 
timestamps).\n alignedTranscript?: boolean;\n}\n\nexport interface TTSError {\n type: 'tts_error';\n timestamp: number;\n label: string;\n error: Error;\n recoverable: boolean;\n}\n\nexport type TTSCallbacks = {\n ['metrics_collected']: (metrics: TTSMetrics) => void;\n ['error']: (error: TTSError) => void;\n};\n\n/**\n * An instance of a text-to-speech adapter.\n *\n * @remarks\n * This class is abstract, and as such cannot be used directly. Instead, use a provider plugin that\n * exports its own child TTS class, which inherits this class's methods.\n */\nexport abstract class TTS extends (EventEmitter as new () => TypedEmitter<TTSCallbacks>) {\n #capabilities: TTSCapabilities;\n #sampleRate: number;\n #numChannels: number;\n abstract label: string;\n\n constructor(sampleRate: number, numChannels: number, capabilities: TTSCapabilities) {\n super();\n this.#capabilities = capabilities;\n this.#sampleRate = sampleRate;\n this.#numChannels = numChannels;\n }\n\n /** Returns this TTS's capabilities */\n get capabilities(): TTSCapabilities {\n return this.#capabilities;\n }\n\n /** Returns the sample rate of audio frames returned by this TTS */\n get sampleRate(): number {\n return this.#sampleRate;\n }\n\n /** Returns the channel count of audio frames returned by this TTS */\n get numChannels(): number {\n return this.#numChannels;\n }\n\n /**\n * Receives text and returns synthesis in the form of a {@link ChunkedStream}\n */\n abstract synthesize(\n text: string,\n connOptions?: APIConnectOptions,\n abortSignal?: AbortSignal,\n ): ChunkedStream;\n\n /**\n * Returns a {@link SynthesizeStream} that can be used to push text and receive audio data\n *\n * @param options - Optional configuration including connection options\n */\n abstract stream(options?: { connOptions?: APIConnectOptions }): SynthesizeStream;\n\n async close(): Promise<void> {\n return;\n }\n}\n\n/**\n * An instance of a text-to-speech stream, as an asynchronous iterable iterator.\n *\n * @example 
Looping through frames\n * ```ts\n * for await (const event of stream) {\n * await source.captureFrame(event.frame);\n * }\n * ```\n *\n * @remarks\n * This class is abstract, and as such cannot be used directly. Instead, use a provider plugin that\n * exports its own child SynthesizeStream class, which inherits this class's methods.\n */\nexport abstract class SynthesizeStream\n implements AsyncIterableIterator<SynthesizedAudio | typeof SynthesizeStream.END_OF_STREAM>\n{\n protected static readonly FLUSH_SENTINEL = Symbol('FLUSH_SENTINEL');\n static readonly END_OF_STREAM = Symbol('END_OF_STREAM');\n protected input = new AsyncIterableQueue<string | typeof SynthesizeStream.FLUSH_SENTINEL>();\n protected queue = new AsyncIterableQueue<\n SynthesizedAudio | typeof SynthesizeStream.END_OF_STREAM\n >();\n protected output = new AsyncIterableQueue<\n SynthesizedAudio | typeof SynthesizeStream.END_OF_STREAM\n >();\n protected closed = false;\n protected connOptions: APIConnectOptions;\n protected abortController = new AbortController();\n\n private deferredInputStream: DeferredReadableStream<\n string | typeof SynthesizeStream.FLUSH_SENTINEL\n >;\n private logger = log();\n\n abstract label: string;\n\n #tts: TTS;\n #metricsPendingTexts: string[] = [];\n #metricsText = '';\n #monitorMetricsTask?: Promise<void>;\n #ttsRequestSpan?: Span;\n\n constructor(tts: TTS, connOptions: APIConnectOptions = DEFAULT_API_CONNECT_OPTIONS) {\n this.#tts = tts;\n this.connOptions = connOptions;\n this.deferredInputStream = new DeferredReadableStream();\n this.pumpInput();\n\n this.abortController.signal.addEventListener('abort', () => {\n this.deferredInputStream.detachSource();\n // TODO (AJS-36) clean this up when we refactor with streams\n if (!this.input.closed) this.input.close();\n if (!this.output.closed) this.output.close();\n this.closed = true;\n });\n\n // this is a hack to immitate asyncio.create_task so that mainTask\n // is run **after** the constructor has finished. 
Otherwise we get\n // runtime error when trying to access class variables in the\n // `run` method.\n startSoon(() => this.mainTask().finally(() => this.queue.close()));\n }\n\n private _mainTaskImpl = async (span: Span) => {\n this.#ttsRequestSpan = span;\n span.setAttributes({\n [traceTypes.ATTR_TTS_STREAMING]: true,\n [traceTypes.ATTR_TTS_LABEL]: this.#tts.label,\n });\n\n for (let i = 0; i < this.connOptions.maxRetry + 1; i++) {\n try {\n return await tracer.startActiveSpan(\n async (attemptSpan) => {\n attemptSpan.setAttribute(traceTypes.ATTR_RETRY_COUNT, i);\n try {\n return await this.run();\n } catch (error) {\n recordException(attemptSpan, toError(error));\n throw error;\n }\n },\n { name: 'tts_request_run' },\n );\n } catch (error) {\n if (error instanceof APIError) {\n const retryInterval = intervalForRetry(this.connOptions, i);\n\n if (this.connOptions.maxRetry === 0 || !error.retryable) {\n this.emitError({ error, recoverable: false });\n throw error;\n } else if (i === this.connOptions.maxRetry) {\n this.emitError({ error, recoverable: false });\n throw new APIConnectionError({\n message: `failed to generate TTS completion after ${this.connOptions.maxRetry + 1} attempts`,\n options: { retryable: false },\n });\n } else {\n // Don't emit error event for recoverable errors during retry loop\n // to avoid ERR_UNHANDLED_ERROR or premature session termination\n this.logger.warn(\n { tts: this.#tts.label, attempt: i + 1, error },\n `failed to synthesize speech, retrying in ${retryInterval}s`,\n );\n }\n\n if (retryInterval > 0) {\n await delay(retryInterval);\n }\n } else {\n this.emitError({ error: toError(error), recoverable: false });\n throw error;\n }\n }\n }\n };\n\n private mainTask = async () =>\n tracer.startActiveSpan(async (span) => this._mainTaskImpl(span), {\n name: 'tts_request',\n endOnExit: false,\n });\n\n private emitError({ error, recoverable }: { error: Error; recoverable: boolean }) {\n this.#tts.emit('error', {\n type: 'tts_error',\n 
timestamp: Date.now(),\n label: this.#tts.label,\n error,\n recoverable,\n });\n }\n\n // NOTE(AJS-37): The implementation below uses an AsyncIterableQueue (`this.input`)\n // bridged from a DeferredReadableStream (`this.deferredInputStream`) rather than\n // consuming the stream directly.\n //\n // A full refactor to native Web Streams was considered but is currently deferred.\n // The primary reason is to maintain architectural parity with the Python SDK,\n // which is a key design goal for the project. This ensures a consistent developer\n // experience across both platforms.\n //\n // For more context, see the discussion in GitHub issue # 844.\n protected async pumpInput() {\n const reader = this.deferredInputStream.stream.getReader();\n try {\n while (true) {\n const { done, value } = await reader.read();\n if (done || value === SynthesizeStream.FLUSH_SENTINEL) {\n break;\n }\n this.pushText(value);\n }\n this.endInput();\n } catch (error) {\n this.logger.error(error, 'Error reading deferred input stream');\n } finally {\n reader.releaseLock();\n // Ensure output is closed when the stream ends\n if (!this.#monitorMetricsTask) {\n // No text was received, close the output directly\n this.output.close();\n }\n }\n }\n\n protected async monitorMetrics() {\n const startTime = process.hrtime.bigint();\n let audioDurationMs = 0;\n let ttfb: bigint = BigInt(-1);\n let requestId = '';\n\n const emit = () => {\n if (this.#metricsPendingTexts.length) {\n const text = this.#metricsPendingTexts.shift()!;\n const duration = process.hrtime.bigint() - startTime;\n const roundedAudioDurationMs = Math.round(audioDurationMs);\n const metrics: TTSMetrics = {\n type: 'tts_metrics',\n timestamp: Date.now(),\n requestId,\n ttfbMs: ttfb === BigInt(-1) ? 
-1 : Math.trunc(Number(ttfb / BigInt(1000000))),\n durationMs: Math.trunc(Number(duration / BigInt(1000000))),\n charactersCount: text.length,\n audioDurationMs: roundedAudioDurationMs,\n cancelled: this.abortController.signal.aborted,\n label: this.#tts.label,\n streamed: false,\n };\n if (this.#ttsRequestSpan) {\n this.#ttsRequestSpan.setAttribute(traceTypes.ATTR_TTS_METRICS, JSON.stringify(metrics));\n }\n this.#tts.emit('metrics_collected', metrics);\n }\n };\n\n for await (const audio of this.queue) {\n if (this.abortController.signal.aborted) {\n break;\n }\n this.output.put(audio);\n if (audio === SynthesizeStream.END_OF_STREAM) continue;\n requestId = audio.requestId;\n if (ttfb === BigInt(-1)) {\n ttfb = process.hrtime.bigint() - startTime;\n }\n // TODO(AJS-102): use frame.durationMs once available in rtc-node\n audioDurationMs += (audio.frame.samplesPerChannel / audio.frame.sampleRate) * 1000;\n if (audio.final) {\n emit();\n }\n }\n\n if (requestId) {\n emit();\n }\n\n if (this.#ttsRequestSpan) {\n this.#ttsRequestSpan.end();\n this.#ttsRequestSpan = undefined;\n }\n }\n\n protected abstract run(): Promise<void>;\n\n updateInputStream(text: ReadableStream<string>) {\n this.deferredInputStream.setSource(text);\n }\n\n /** Push a string of text to the TTS */\n /** @deprecated Use `updateInputStream` instead */\n pushText(text: string) {\n if (!this.#monitorMetricsTask) {\n this.#monitorMetricsTask = this.monitorMetrics();\n // Close output when metrics task completes\n this.#monitorMetricsTask.finally(() => this.output.close());\n }\n this.#metricsText += text;\n\n if (this.input.closed || this.closed) {\n // Stream was aborted/closed, silently skip\n return;\n }\n\n this.input.put(text);\n }\n\n /** Flush the TTS, causing it to process all pending text */\n flush() {\n if (this.#metricsText) {\n this.#metricsPendingTexts.push(this.#metricsText);\n this.#metricsText = '';\n }\n\n if (this.input.closed || this.closed) {\n // Stream was aborted/closed, 
silently skip\n return;\n }\n\n this.input.put(SynthesizeStream.FLUSH_SENTINEL);\n }\n\n /** Mark the input as ended and forbid additional pushes */\n endInput() {\n this.flush();\n\n if (this.input.closed || this.closed) {\n // Stream was aborted/closed, silently skip\n return;\n }\n\n this.input.close();\n }\n\n next(): Promise<IteratorResult<SynthesizedAudio | typeof SynthesizeStream.END_OF_STREAM>> {\n return this.output.next();\n }\n\n get abortSignal(): AbortSignal {\n return this.abortController.signal;\n }\n\n /** Close both the input and output of the TTS stream */\n close() {\n this.abortController.abort();\n }\n\n [Symbol.asyncIterator](): SynthesizeStream {\n return this;\n }\n}\n\n/**\n * An instance of a text-to-speech response, as an asynchronous iterable iterator.\n *\n * @example Looping through frames\n * ```ts\n * for await (const event of stream) {\n * await source.captureFrame(event.frame);\n * }\n * ```\n *\n * @remarks\n * This class is abstract, and as such cannot be used directly. 
Instead, use a provider plugin that\n * exports its own child ChunkedStream class, which inherits this class's methods.\n */\nexport abstract class ChunkedStream implements AsyncIterableIterator<SynthesizedAudio> {\n protected queue = new AsyncIterableQueue<SynthesizedAudio>();\n protected output = new AsyncIterableQueue<SynthesizedAudio>();\n protected closed = false;\n abstract label: string;\n #text: string;\n #tts: TTS;\n #ttsRequestSpan?: Span;\n private _connOptions: APIConnectOptions;\n private logger = log();\n\n protected abortController = new AbortController();\n\n constructor(\n text: string,\n tts: TTS,\n connOptions: APIConnectOptions = DEFAULT_API_CONNECT_OPTIONS,\n abortSignal?: AbortSignal,\n ) {\n this.#text = text;\n this.#tts = tts;\n this._connOptions = connOptions;\n\n if (abortSignal) {\n abortSignal.addEventListener('abort', () => this.abortController.abort(), { once: true });\n }\n\n this.monitorMetrics();\n\n // this is a hack to immitate asyncio.create_task so that mainTask\n // is run **after** the constructor has finished. 
Otherwise we get\n // runtime error when trying to access class variables in the\n // `run` method.\n Promise.resolve().then(() => this.mainTask().finally(() => this.queue.close()));\n }\n\n private _mainTaskImpl = async (span: Span) => {\n this.#ttsRequestSpan = span;\n span.setAttributes({\n [traceTypes.ATTR_TTS_STREAMING]: false,\n [traceTypes.ATTR_TTS_LABEL]: this.#tts.label,\n });\n\n for (let i = 0; i < this._connOptions.maxRetry + 1; i++) {\n try {\n return await tracer.startActiveSpan(\n async (attemptSpan) => {\n attemptSpan.setAttribute(traceTypes.ATTR_RETRY_COUNT, i);\n try {\n return await this.run();\n } catch (error) {\n recordException(attemptSpan, toError(error));\n throw error;\n }\n },\n { name: 'tts_request_run' },\n );\n } catch (error) {\n if (error instanceof APIError) {\n const retryInterval = intervalForRetry(this._connOptions, i);\n\n if (this._connOptions.maxRetry === 0 || !error.retryable) {\n this.emitError({ error, recoverable: false });\n throw error;\n } else if (i === this._connOptions.maxRetry) {\n this.emitError({ error, recoverable: false });\n throw new APIConnectionError({\n message: `failed to generate TTS completion after ${this._connOptions.maxRetry + 1} attempts`,\n options: { retryable: false },\n });\n } else {\n // Don't emit error event for recoverable errors during retry loop\n // to avoid ERR_UNHANDLED_ERROR or premature session termination\n this.logger.warn(\n { tts: this.#tts.label, attempt: i + 1, error },\n `failed to generate TTS completion, retrying in ${retryInterval}s`,\n );\n }\n\n if (retryInterval > 0) {\n await delay(retryInterval);\n }\n } else {\n this.emitError({ error: toError(error), recoverable: false });\n throw error;\n }\n }\n }\n };\n\n private async mainTask() {\n return tracer.startActiveSpan(async (span) => this._mainTaskImpl(span), {\n name: 'tts_request',\n endOnExit: false,\n });\n }\n\n private emitError({ error, recoverable }: { error: Error; recoverable: boolean }) {\n 
this.#tts.emit('error', {\n type: 'tts_error',\n timestamp: Date.now(),\n label: this.#tts.label,\n error,\n recoverable,\n });\n }\n\n protected abstract run(): Promise<void>;\n\n get inputText(): string {\n return this.#text;\n }\n\n get abortSignal(): AbortSignal {\n return this.abortController.signal;\n }\n\n protected async monitorMetrics() {\n const startTime = process.hrtime.bigint();\n let audioDurationMs = 0;\n let ttfb: bigint = BigInt(-1);\n let requestId = '';\n\n for await (const audio of this.queue) {\n this.output.put(audio);\n requestId = audio.requestId;\n if (ttfb === BigInt(-1)) {\n ttfb = process.hrtime.bigint() - startTime;\n }\n audioDurationMs += (audio.frame.samplesPerChannel / audio.frame.sampleRate) * 1000;\n }\n this.output.close();\n\n const duration = process.hrtime.bigint() - startTime;\n const metrics: TTSMetrics = {\n type: 'tts_metrics',\n timestamp: Date.now(),\n requestId,\n ttfbMs: ttfb === BigInt(-1) ? -1 : Math.trunc(Number(ttfb / BigInt(1000000))),\n durationMs: Math.trunc(Number(duration / BigInt(1000000))),\n charactersCount: this.#text.length,\n audioDurationMs: Math.round(audioDurationMs),\n cancelled: false, // TODO(AJS-186): support ChunkedStream with 1.0 - add this.abortController.signal.aborted here\n label: this.#tts.label,\n streamed: false,\n };\n\n if (this.#ttsRequestSpan) {\n this.#ttsRequestSpan.setAttribute(traceTypes.ATTR_TTS_METRICS, JSON.stringify(metrics));\n this.#ttsRequestSpan.end();\n this.#ttsRequestSpan = undefined;\n }\n\n this.#tts.emit('metrics_collected', metrics);\n }\n\n /** Collect every frame into one in a single call */\n async collect(): Promise<AudioFrame> {\n const frames = [];\n for await (const event of this) {\n frames.push(event.frame);\n }\n return mergeFrames(frames);\n }\n\n next(): Promise<IteratorResult<SynthesizedAudio>> {\n return this.output.next();\n }\n\n /** Close both the input and output of the TTS stream */\n close() {\n if (!this.queue.closed) this.queue.close();\n if 
(!this.output.closed) this.output.close();\n if (!this.abortController.signal.aborted) this.abortController.abort();\n this.closed = true;\n }\n\n [Symbol.asyncIterator](): ChunkedStream {\n return this;\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAMA,yBAA6B;AAE7B,wBAA6C;AAC7C,iBAAoB;AAEpB,6BAAuC;AACvC,uBAAoD;AACpD,mBAAsF;AACtF,mBAA2E;AAwDpE,MAAe,YAAa,gCAAsD;AAAA,EACvF;AAAA,EACA;AAAA,EACA;AAAA,EAGA,YAAY,YAAoB,aAAqB,cAA+B;AAClF,UAAM;AACN,SAAK,gBAAgB;AACrB,SAAK,cAAc;AACnB,SAAK,eAAe;AAAA,EACtB;AAAA;AAAA,EAGA,IAAI,eAAgC;AAClC,WAAO,KAAK;AAAA,EACd;AAAA;AAAA,EAGA,IAAI,aAAqB;AACvB,WAAO,KAAK;AAAA,EACd;AAAA;AAAA,EAGA,IAAI,cAAsB;AACxB,WAAO,KAAK;AAAA,EACd;AAAA,EAkBA,MAAM,QAAuB;AAC3B;AAAA,EACF;AACF;AAgBO,MAAe,iBAEtB;AAAA,EACE,OAA0B,iBAAiB,OAAO,gBAAgB;AAAA,EAClE,OAAgB,gBAAgB,OAAO,eAAe;AAAA,EAC5C,QAAQ,IAAI,gCAAoE;AAAA,EAChF,QAAQ,IAAI,gCAEpB;AAAA,EACQ,SAAS,IAAI,gCAErB;AAAA,EACQ,SAAS;AAAA,EACT;AAAA,EACA,kBAAkB,IAAI,gBAAgB;AAAA,EAExC;AAAA,EAGA,aAAS,gBAAI;AAAA,EAIrB;AAAA,EACA,uBAAiC,CAAC;AAAA,EAClC,eAAe;AAAA,EACf;AAAA,EACA;AAAA,EAEA,YAAY,KAAU,cAAiC,0CAA6B;AAClF,SAAK,OAAO;AACZ,SAAK,cAAc;AACnB,SAAK,sBAAsB,IAAI,8CAAuB;AACtD,SAAK,UAAU;AAEf,SAAK,gBAAgB,OAAO,iBAAiB,SAAS,MAAM;AAC1D,WAAK,oBAAoB,aAAa;AAEtC,UAAI,CAAC,KAAK,MAAM,OAAQ,MAAK,MAAM,MAAM;AACzC,UAAI,CAAC,KAAK,OAAO,OAAQ,MAAK,OAAO,MAAM;AAC3C,WAAK,SAAS;AAAA,IAChB,CAAC;AAMD,gCAAU,MAAM,KAAK,SAAS,EAAE,QAAQ,MAAM,KAAK,MAAM,MAAM,CAAC,CAAC;AAAA,EACnE;AAAA,EAEQ,gBAAgB,OAAO,SAAe;AAC5C,SAAK,kBAAkB;AACvB,SAAK,cAAc;AAAA,MACjB,CAAC,4BAAW,kBAAkB,GAAG;AAAA,MACjC,CAAC,4BAAW,cAAc,GAAG,KAAK,KAAK;AAAA,IACzC,CAAC;AAED,aAAS,IAAI,GAAG,IAAI,KAAK,YAAY,WAAW,GAAG,KAAK;AACtD,UAAI;AACF,eAAO,MAAM,wBAAO;AAAA,UAClB,OAAO,gBAAgB;AACrB,wBAAY,aAAa,4BAAW,kBAAkB,CAAC;AACvD,gBAAI;AACF,qBAAO,MAAM,KAAK,IAAI;AAAA,YACxB,SAAS,OAAO;AACd,oDAAgB,iBAAa,sBAAQ,KAAK,CAAC;AAC3C,oBAAM;AAAA,YACR;AAAA,UACF;AAAA,UACA,EAAE,MAAM,kBAAkB;AAAA,QAC5B;AAAA,MACF,SAAS,OAAO;AACd,YAAI,iBAAiB,4BAAU;AAC7B,gBAAM,oBAAgB,+BAAiB,KAAK,aAAa,CAAC;AAE1D,cAAI,KAAK,YAAY,aAAa,KAAK,CAA
C,MAAM,WAAW;AACvD,iBAAK,UAAU,EAAE,OAAO,aAAa,MAAM,CAAC;AAC5C,kBAAM;AAAA,UACR,WAAW,MAAM,KAAK,YAAY,UAAU;AAC1C,iBAAK,UAAU,EAAE,OAAO,aAAa,MAAM,CAAC;AAC5C,kBAAM,IAAI,qCAAmB;AAAA,cAC3B,SAAS,2CAA2C,KAAK,YAAY,WAAW,CAAC;AAAA,cACjF,SAAS,EAAE,WAAW,MAAM;AAAA,YAC9B,CAAC;AAAA,UACH,OAAO;AAGL,iBAAK,OAAO;AAAA,cACV,EAAE,KAAK,KAAK,KAAK,OAAO,SAAS,IAAI,GAAG,MAAM;AAAA,cAC9C,6CAA6C,aAAa;AAAA,YAC5D;AAAA,UACF;AAEA,cAAI,gBAAgB,GAAG;AACrB,sBAAM,oBAAM,aAAa;AAAA,UAC3B;AAAA,QACF,OAAO;AACL,eAAK,UAAU,EAAE,WAAO,sBAAQ,KAAK,GAAG,aAAa,MAAM,CAAC;AAC5D,gBAAM;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA,EAEQ,WAAW,YACjB,wBAAO,gBAAgB,OAAO,SAAS,KAAK,cAAc,IAAI,GAAG;AAAA,IAC/D,MAAM;AAAA,IACN,WAAW;AAAA,EACb,CAAC;AAAA,EAEK,UAAU,EAAE,OAAO,YAAY,GAA2C;AAChF,SAAK,KAAK,KAAK,SAAS;AAAA,MACtB,MAAM;AAAA,MACN,WAAW,KAAK,IAAI;AAAA,MACpB,OAAO,KAAK,KAAK;AAAA,MACjB;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,MAAgB,YAAY;AAC1B,UAAM,SAAS,KAAK,oBAAoB,OAAO,UAAU;AACzD,QAAI;AACF,aAAO,MAAM;AACX,cAAM,EAAE,MAAM,MAAM,IAAI,MAAM,OAAO,KAAK;AAC1C,YAAI,QAAQ,UAAU,iBAAiB,gBAAgB;AACrD;AAAA,QACF;AACA,aAAK,SAAS,KAAK;AAAA,MACrB;AACA,WAAK,SAAS;AAAA,IAChB,SAAS,OAAO;AACd,WAAK,OAAO,MAAM,OAAO,qCAAqC;AAAA,IAChE,UAAE;AACA,aAAO,YAAY;AAEnB,UAAI,CAAC,KAAK,qBAAqB;AAE7B,aAAK,OAAO,MAAM;AAAA,MACpB;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAgB,iBAAiB;AAC/B,UAAM,YAAY,QAAQ,OAAO,OAAO;AACxC,QAAI,kBAAkB;AACtB,QAAI,OAAe,OAAO,EAAE;AAC5B,QAAI,YAAY;AAEhB,UAAM,OAAO,MAAM;AACjB,UAAI,KAAK,qBAAqB,QAAQ;AACpC,cAAM,OAAO,KAAK,qBAAqB,MAAM;AAC7C,cAAM,WAAW,QAAQ,OAAO,OAAO,IAAI;AAC3C,cAAM,yBAAyB,KAAK,MAAM,eAAe;AACzD,cAAM,UAAsB;AAAA,UAC1B,MAAM;AAAA,UACN,WAAW,KAAK,IAAI;AAAA,UACpB;AAAA,UACA,QAAQ,SAAS,OAAO,EAAE,IAAI,KAAK,KAAK,MAAM,OAAO,OAAO,OAAO,GAAO,CAAC,CAAC;AAAA,UAC5E,YAAY,KAAK,MAAM,OAAO,WAAW,OAAO,GAAO,CAAC,CAAC;AAAA,UACzD,iBAAiB,KAAK;AAAA,UACtB,iBAAiB;AAAA,UACjB,WAAW,KAAK,gBAAgB,OAAO;AAAA,UACvC,OAAO,KAAK,KAAK;AAAA,UACjB,UAAU;AAAA,QACZ;AACA,YAAI,KAAK,iBAAiB;AACxB,eAAK,gBAAgB,aAAa,4BAAW,kBAAkB,KAAK,UAAU,OAAO,CAAC;AAAA,QACxF;AACA,aAAK,KAAK,KAAK,qBA
AqB,OAAO;AAAA,MAC7C;AAAA,IACF;AAEA,qBAAiB,SAAS,KAAK,OAAO;AACpC,UAAI,KAAK,gBAAgB,OAAO,SAAS;AACvC;AAAA,MACF;AACA,WAAK,OAAO,IAAI,KAAK;AACrB,UAAI,UAAU,iBAAiB,cAAe;AAC9C,kBAAY,MAAM;AAClB,UAAI,SAAS,OAAO,EAAE,GAAG;AACvB,eAAO,QAAQ,OAAO,OAAO,IAAI;AAAA,MACnC;AAEA,yBAAoB,MAAM,MAAM,oBAAoB,MAAM,MAAM,aAAc;AAC9E,UAAI,MAAM,OAAO;AACf,aAAK;AAAA,MACP;AAAA,IACF;AAEA,QAAI,WAAW;AACb,WAAK;AAAA,IACP;AAEA,QAAI,KAAK,iBAAiB;AACxB,WAAK,gBAAgB,IAAI;AACzB,WAAK,kBAAkB;AAAA,IACzB;AAAA,EACF;AAAA,EAIA,kBAAkB,MAA8B;AAC9C,SAAK,oBAAoB,UAAU,IAAI;AAAA,EACzC;AAAA;AAAA;AAAA,EAIA,SAAS,MAAc;AACrB,QAAI,CAAC,KAAK,qBAAqB;AAC7B,WAAK,sBAAsB,KAAK,eAAe;AAE/C,WAAK,oBAAoB,QAAQ,MAAM,KAAK,OAAO,MAAM,CAAC;AAAA,IAC5D;AACA,SAAK,gBAAgB;AAErB,QAAI,KAAK,MAAM,UAAU,KAAK,QAAQ;AAEpC;AAAA,IACF;AAEA,SAAK,MAAM,IAAI,IAAI;AAAA,EACrB;AAAA;AAAA,EAGA,QAAQ;AACN,QAAI,KAAK,cAAc;AACrB,WAAK,qBAAqB,KAAK,KAAK,YAAY;AAChD,WAAK,eAAe;AAAA,IACtB;AAEA,QAAI,KAAK,MAAM,UAAU,KAAK,QAAQ;AAEpC;AAAA,IACF;AAEA,SAAK,MAAM,IAAI,iBAAiB,cAAc;AAAA,EAChD;AAAA;AAAA,EAGA,WAAW;AACT,SAAK,MAAM;AAEX,QAAI,KAAK,MAAM,UAAU,KAAK,QAAQ;AAEpC;AAAA,IACF;AAEA,SAAK,MAAM,MAAM;AAAA,EACnB;AAAA,EAEA,OAA0F;AACxF,WAAO,KAAK,OAAO,KAAK;AAAA,EAC1B;AAAA,EAEA,IAAI,cAA2B;AAC7B,WAAO,KAAK,gBAAgB;AAAA,EAC9B;AAAA;AAAA,EAGA,QAAQ;AACN,SAAK,gBAAgB,MAAM;AAAA,EAC7B;AAAA,EAEA,CAAC,OAAO,aAAa,IAAsB;AACzC,WAAO;AAAA,EACT;AACF;AAgBO,MAAe,cAAiE;AAAA,EAC3E,QAAQ,IAAI,gCAAqC;AAAA,EACjD,SAAS,IAAI,gCAAqC;AAAA,EAClD,SAAS;AAAA,EAEnB;AAAA,EACA;AAAA,EACA;AAAA,EACQ;AAAA,EACA,aAAS,gBAAI;AAAA,EAEX,kBAAkB,IAAI,gBAAgB;AAAA,EAEhD,YACE,MACA,KACA,cAAiC,0CACjC,aACA;AACA,SAAK,QAAQ;AACb,SAAK,OAAO;AACZ,SAAK,eAAe;AAEpB,QAAI,aAAa;AACf,kBAAY,iBAAiB,SAAS,MAAM,KAAK,gBAAgB,MAAM,GAAG,EAAE,MAAM,KAAK,CAAC;AAAA,IAC1F;AAEA,SAAK,eAAe;AAMpB,YAAQ,QAAQ,EAAE,KAAK,MAAM,KAAK,SAAS,EAAE,QAAQ,MAAM,KAAK,MAAM,MAAM,CAAC,CAAC;AAAA,EAChF;AAAA,EAEQ,gBAAgB,OAAO,SAAe;AAC5C,SAAK,kBAAkB;AACvB,SAAK,cAAc;AAAA,MACjB,CAAC,4BAAW,kBAAkB,GAAG;AAAA,MACjC,CAAC,4BAAW,cAAc,GAAG,KAAK,KAAK;AAAA,IACzC,CAAC;AAED,aAAS,IAAI,GAAG,IAAI,KAAK,aAAa,WAAW,GAAG,KAAK;AACvD,UAAI;AACF,eAAO
,MAAM,wBAAO;AAAA,UAClB,OAAO,gBAAgB;AACrB,wBAAY,aAAa,4BAAW,kBAAkB,CAAC;AACvD,gBAAI;AACF,qBAAO,MAAM,KAAK,IAAI;AAAA,YACxB,SAAS,OAAO;AACd,oDAAgB,iBAAa,sBAAQ,KAAK,CAAC;AAC3C,oBAAM;AAAA,YACR;AAAA,UACF;AAAA,UACA,EAAE,MAAM,kBAAkB;AAAA,QAC5B;AAAA,MACF,SAAS,OAAO;AACd,YAAI,iBAAiB,4BAAU;AAC7B,gBAAM,oBAAgB,+BAAiB,KAAK,cAAc,CAAC;AAE3D,cAAI,KAAK,aAAa,aAAa,KAAK,CAAC,MAAM,WAAW;AACxD,iBAAK,UAAU,EAAE,OAAO,aAAa,MAAM,CAAC;AAC5C,kBAAM;AAAA,UACR,WAAW,MAAM,KAAK,aAAa,UAAU;AAC3C,iBAAK,UAAU,EAAE,OAAO,aAAa,MAAM,CAAC;AAC5C,kBAAM,IAAI,qCAAmB;AAAA,cAC3B,SAAS,2CAA2C,KAAK,aAAa,WAAW,CAAC;AAAA,cAClF,SAAS,EAAE,WAAW,MAAM;AAAA,YAC9B,CAAC;AAAA,UACH,OAAO;AAGL,iBAAK,OAAO;AAAA,cACV,EAAE,KAAK,KAAK,KAAK,OAAO,SAAS,IAAI,GAAG,MAAM;AAAA,cAC9C,kDAAkD,aAAa;AAAA,YACjE;AAAA,UACF;AAEA,cAAI,gBAAgB,GAAG;AACrB,sBAAM,oBAAM,aAAa;AAAA,UAC3B;AAAA,QACF,OAAO;AACL,eAAK,UAAU,EAAE,WAAO,sBAAQ,KAAK,GAAG,aAAa,MAAM,CAAC;AAC5D,gBAAM;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAc,WAAW;AACvB,WAAO,wBAAO,gBAAgB,OAAO,SAAS,KAAK,cAAc,IAAI,GAAG;AAAA,MACtE,MAAM;AAAA,MACN,WAAW;AAAA,IACb,CAAC;AAAA,EACH;AAAA,EAEQ,UAAU,EAAE,OAAO,YAAY,GAA2C;AAChF,SAAK,KAAK,KAAK,SAAS;AAAA,MACtB,MAAM;AAAA,MACN,WAAW,KAAK,IAAI;AAAA,MACpB,OAAO,KAAK,KAAK;AAAA,MACjB;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AAAA,EAIA,IAAI,YAAoB;AACtB,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,IAAI,cAA2B;AAC7B,WAAO,KAAK,gBAAgB;AAAA,EAC9B;AAAA,EAEA,MAAgB,iBAAiB;AAC/B,UAAM,YAAY,QAAQ,OAAO,OAAO;AACxC,QAAI,kBAAkB;AACtB,QAAI,OAAe,OAAO,EAAE;AAC5B,QAAI,YAAY;AAEhB,qBAAiB,SAAS,KAAK,OAAO;AACpC,WAAK,OAAO,IAAI,KAAK;AACrB,kBAAY,MAAM;AAClB,UAAI,SAAS,OAAO,EAAE,GAAG;AACvB,eAAO,QAAQ,OAAO,OAAO,IAAI;AAAA,MACnC;AACA,yBAAoB,MAAM,MAAM,oBAAoB,MAAM,MAAM,aAAc;AAAA,IAChF;AACA,SAAK,OAAO,MAAM;AAElB,UAAM,WAAW,QAAQ,OAAO,OAAO,IAAI;AAC3C,UAAM,UAAsB;AAAA,MAC1B,MAAM;AAAA,MACN,WAAW,KAAK,IAAI;AAAA,MACpB;AAAA,MACA,QAAQ,SAAS,OAAO,EAAE,IAAI,KAAK,KAAK,MAAM,OAAO,OAAO,OAAO,GAAO,CAAC,CAAC;AAAA,MAC5E,YAAY,KAAK,MAAM,OAAO,WAAW,OAAO,GAAO,CAAC,CAAC;AAAA,MACzD,iBAAiB,KAAK,MAAM;AAAA,MAC5B,iBAAiB,KAAK,MAAM,eAAe;AAAA,MAC3C,WAAW;AAAA;AAAA,MACX,OAAO,KAAK,
KAAK;AAAA,MACjB,UAAU;AAAA,IACZ;AAEA,QAAI,KAAK,iBAAiB;AACxB,WAAK,gBAAgB,aAAa,4BAAW,kBAAkB,KAAK,UAAU,OAAO,CAAC;AACtF,WAAK,gBAAgB,IAAI;AACzB,WAAK,kBAAkB;AAAA,IACzB;AAEA,SAAK,KAAK,KAAK,qBAAqB,OAAO;AAAA,EAC7C;AAAA;AAAA,EAGA,MAAM,UAA+B;AACnC,UAAM,SAAS,CAAC;AAChB,qBAAiB,SAAS,MAAM;AAC9B,aAAO,KAAK,MAAM,KAAK;AAAA,IACzB;AACA,eAAO,0BAAY,MAAM;AAAA,EAC3B;AAAA,EAEA,OAAkD;AAChD,WAAO,KAAK,OAAO,KAAK;AAAA,EAC1B;AAAA;AAAA,EAGA,QAAQ;AACN,QAAI,CAAC,KAAK,MAAM,OAAQ,MAAK,MAAM,MAAM;AACzC,QAAI,CAAC,KAAK,OAAO,OAAQ,MAAK,OAAO,MAAM;AAC3C,QAAI,CAAC,KAAK,gBAAgB,OAAO,QAAS,MAAK,gBAAgB,MAAM;AACrE,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,CAAC,OAAO,aAAa,IAAmB;AACtC,WAAO;AAAA,EACT;AACF;","names":[]}