@mastra/voice-elevenlabs 0.11.12 → 0.12.0-beta.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,31 +1,19 @@
  # @mastra/voice-elevenlabs
 
- ## 0.11.12
+ ## 0.12.0-beta.0
 
- ### Patch Changes
-
- - update peerdeps ([`5ca1cca`](https://github.com/mastra-ai/mastra/commit/5ca1ccac61ffa7141e6d9fa8f22d3ad4d03bf5dc))
-
- - Updated dependencies [[`5ca1cca`](https://github.com/mastra-ai/mastra/commit/5ca1ccac61ffa7141e6d9fa8f22d3ad4d03bf5dc), [`6d7e90d`](https://github.com/mastra-ai/mastra/commit/6d7e90db09713e6250f4d6c3d3cff1b4740e50f9), [`f78b908`](https://github.com/mastra-ai/mastra/commit/f78b9080e11af765969b36b4a619761056030840), [`23c2614`](https://github.com/mastra-ai/mastra/commit/23c26140fdbf04b8c59e8d7d52106d67dad962ec), [`e365eda`](https://github.com/mastra-ai/mastra/commit/e365eda45795b43707310531cac1e2ce4e5a0712)]:
- - @mastra/core@0.24.0
-
- ## 0.11.12-alpha.0
-
- ### Patch Changes
+ ### Minor Changes
 
- - update peerdeps ([`5ca1cca`](https://github.com/mastra-ai/mastra/commit/5ca1ccac61ffa7141e6d9fa8f22d3ad4d03bf5dc))
+ - Update peer dependencies to match core package version bump (1.0.0) ([#9237](https://github.com/mastra-ai/mastra/pull/9237))
 
- - Updated dependencies [[`5ca1cca`](https://github.com/mastra-ai/mastra/commit/5ca1ccac61ffa7141e6d9fa8f22d3ad4d03bf5dc), [`6d7e90d`](https://github.com/mastra-ai/mastra/commit/6d7e90db09713e6250f4d6c3d3cff1b4740e50f9), [`f78b908`](https://github.com/mastra-ai/mastra/commit/f78b9080e11af765969b36b4a619761056030840), [`23c2614`](https://github.com/mastra-ai/mastra/commit/23c26140fdbf04b8c59e8d7d52106d67dad962ec), [`e365eda`](https://github.com/mastra-ai/mastra/commit/e365eda45795b43707310531cac1e2ce4e5a0712)]:
- - @mastra/core@0.24.0-alpha.0
+ - Bump minimum required Node.js version to 22.13.0 ([#9706](https://github.com/mastra-ai/mastra/pull/9706))
 
- ## 0.11.11
+ - Removed old tracing code based on OpenTelemetry ([#9237](https://github.com/mastra-ai/mastra/pull/9237))
 
  ### Patch Changes
 
- - Fix peerdependencies ([`eb7c1c8`](https://github.com/mastra-ai/mastra/commit/eb7c1c8c592d8fb16dfd250e337d9cdc73c8d5de))
-
- - Updated dependencies []:
- - @mastra/core@0.23.1
+ - Updated dependencies [[`39c9743`](https://github.com/mastra-ai/mastra/commit/39c97432d084294f8ba85fbf3ef28098ff21459e), [`f743dbb`](https://github.com/mastra-ai/mastra/commit/f743dbb8b40d1627b5c10c0e6fc154f4ebb6e394), [`fec5129`](https://github.com/mastra-ai/mastra/commit/fec5129de7fc64423ea03661a56cef31dc747a0d), [`0491e7c`](https://github.com/mastra-ai/mastra/commit/0491e7c9b714cb0ba22187ee062147ec2dd7c712), [`f6f4903`](https://github.com/mastra-ai/mastra/commit/f6f4903397314f73362061dc5a3e8e7c61ea34aa), [`0e8ed46`](https://github.com/mastra-ai/mastra/commit/0e8ed467c54d6901a6a365f270ec15d6faadb36c), [`6c049d9`](https://github.com/mastra-ai/mastra/commit/6c049d94063fdcbd5b81c4912a2bf82a92c9cc0b), [`2f897df`](https://github.com/mastra-ai/mastra/commit/2f897df208508f46f51b7625e5dd20c37f93e0e3), [`3443770`](https://github.com/mastra-ai/mastra/commit/3443770662df8eb24c9df3589b2792d78cfcb811), [`f0a07e0`](https://github.com/mastra-ai/mastra/commit/f0a07e0111b3307c5fabfa4094c5c2cfb734fbe6), [`aaa40e7`](https://github.com/mastra-ai/mastra/commit/aaa40e788628b319baa8e889407d11ad626547fa), [`1521d71`](https://github.com/mastra-ai/mastra/commit/1521d716e5daedc74690c983fbd961123c56756b), [`9e1911d`](https://github.com/mastra-ai/mastra/commit/9e1911db2b4db85e0e768c3f15e0d61e319869f6), [`ebac155`](https://github.com/mastra-ai/mastra/commit/ebac15564a590117db7078233f927a7e28a85106), [`dd1c38d`](https://github.com/mastra-ai/mastra/commit/dd1c38d1b75f1b695c27b40d8d9d6ed00d5e0f6f), [`5948e6a`](https://github.com/mastra-ai/mastra/commit/5948e6a5146c83666ba3f294b2be576c82a513fb), [`8940859`](https://github.com/mastra-ai/mastra/commit/89408593658199b4ad67f7b65e888f344e64a442), [`e629310`](https://github.com/mastra-ai/mastra/commit/e629310f1a73fa236d49ec7a1d1cceb6229dc7cc), [`4c6b492`](https://github.com/mastra-ai/mastra/commit/4c6b492c4dd591c6a592520c1f6855d6e936d71f), [`dff01d8`](https://github.com/mastra-ai/mastra/commit/dff01d81ce1f4e4087cfac20fa868e6db138dd14), [`9d819d5`](https://github.com/mastra-ai/mastra/commit/9d819d54b61481639f4008e4694791bddf187edd), [`71c8d6c`](https://github.com/mastra-ai/mastra/commit/71c8d6c161253207b2b9588bdadb7eed604f7253), [`6179a9b`](https://github.com/mastra-ai/mastra/commit/6179a9ba36ffac326de3cc3c43cdc8028d37c251), [`00f4921`](https://github.com/mastra-ai/mastra/commit/00f4921dd2c91a1e5446799599ef7116a8214a1a), [`ca8041c`](https://github.com/mastra-ai/mastra/commit/ca8041cce0379fda22ed293a565bcb5b6ddca68a), [`7051bf3`](https://github.com/mastra-ai/mastra/commit/7051bf38b3b122a069008f861f7bfc004a6d9f6e), [`a8f1494`](https://github.com/mastra-ai/mastra/commit/a8f1494f4bbdc2770bcf327d4c7d869e332183f1), [`0793497`](https://github.com/mastra-ai/mastra/commit/079349753620c40246ffd673e3f9d7d9820beff3), [`5df9cce`](https://github.com/mastra-ai/mastra/commit/5df9cce1a753438413f64c11eeef8f845745c2a8), [`a854ede`](https://github.com/mastra-ai/mastra/commit/a854ede62bf5ac0945a624ac48913dd69c73aabf), [`c576fc0`](https://github.com/mastra-ai/mastra/commit/c576fc0b100b2085afded91a37c97a0ea0ec09c7), [`3defc80`](https://github.com/mastra-ai/mastra/commit/3defc80cf2b88a1b7fc1cc4ddcb91e982a614609), [`16153fe`](https://github.com/mastra-ai/mastra/commit/16153fe7eb13c99401f48e6ca32707c965ee28b9), [`9f4a683`](https://github.com/mastra-ai/mastra/commit/9f4a6833e88b52574665c028fd5508ad5c2f6004), [`bc94344`](https://github.com/mastra-ai/mastra/commit/bc943444a1342d8a662151b7bce1df7dae32f59c), [`57d157f`](https://github.com/mastra-ai/mastra/commit/57d157f0b163a95c3e6c9eae31bdb11d1bfc64f9), 
[`903f67d`](https://github.com/mastra-ai/mastra/commit/903f67d184504a273893818c02b961f5423a79ad), [`2a90c55`](https://github.com/mastra-ai/mastra/commit/2a90c55a86a9210697d5adaab5ee94584b079adc), [`eb09742`](https://github.com/mastra-ai/mastra/commit/eb09742197f66c4c38154c3beec78313e69760b2), [`96d35f6`](https://github.com/mastra-ai/mastra/commit/96d35f61376bc2b1bf148648a2c1985bd51bef55), [`5cbe88a`](https://github.com/mastra-ai/mastra/commit/5cbe88aefbd9f933bca669fd371ea36bf939ac6d), [`a1bd7b8`](https://github.com/mastra-ai/mastra/commit/a1bd7b8571db16b94eb01588f451a74758c96d65), [`d78b38d`](https://github.com/mastra-ai/mastra/commit/d78b38d898fce285260d3bbb4befade54331617f), [`0633100`](https://github.com/mastra-ai/mastra/commit/0633100a911ad22f5256471bdf753da21c104742), [`c710c16`](https://github.com/mastra-ai/mastra/commit/c710c1652dccfdc4111c8412bca7a6bb1d48b441), [`354ad0b`](https://github.com/mastra-ai/mastra/commit/354ad0b7b1b8183ac567f236a884fc7ede6d7138), [`cfae733`](https://github.com/mastra-ai/mastra/commit/cfae73394f4920635e6c919c8e95ff9a0788e2e5), [`e3dfda7`](https://github.com/mastra-ai/mastra/commit/e3dfda7b11bf3b8c4bb55637028befb5f387fc74), [`844ea5d`](https://github.com/mastra-ai/mastra/commit/844ea5dc0c248961e7bf73629ae7dcff503e853c), [`398fde3`](https://github.com/mastra-ai/mastra/commit/398fde3f39e707cda79372cdae8f9870e3b57c8d), [`f0f8f12`](https://github.com/mastra-ai/mastra/commit/f0f8f125c308f2d0fd36942ef652fd852df7522f), [`0d7618b`](https://github.com/mastra-ai/mastra/commit/0d7618bc650bf2800934b243eca5648f4aeed9c2), [`7b763e5`](https://github.com/mastra-ai/mastra/commit/7b763e52fc3eaf699c2a99f2adf418dd46e4e9a5), [`d36cfbb`](https://github.com/mastra-ai/mastra/commit/d36cfbbb6565ba5f827883cc9bb648eb14befdc1), [`3697853`](https://github.com/mastra-ai/mastra/commit/3697853deeb72017d90e0f38a93c1e29221aeca0), [`b2e45ec`](https://github.com/mastra-ai/mastra/commit/b2e45eca727a8db01a81ba93f1a5219c7183c839), [`d6d49f7`](https://github.com/mastra-ai/mastra/commit/d6d49f7b8714fa19a52ff9c7cf7fb7e73751901e), [`a534e95`](https://github.com/mastra-ai/mastra/commit/a534e9591f83b3cc1ebff99c67edf4cda7bf81d3), [`9d0e7fe`](https://github.com/mastra-ai/mastra/commit/9d0e7feca8ed98de959f53476ee1456073673348), [`53d927c`](https://github.com/mastra-ai/mastra/commit/53d927cc6f03bff33655b7e2b788da445a08731d), [`3f2faf2`](https://github.com/mastra-ai/mastra/commit/3f2faf2e2d685d6c053cc5af1bf9fedf267b2ce5), [`22f64bc`](https://github.com/mastra-ai/mastra/commit/22f64bc1d37149480b58bf2fefe35b79a1e3e7d5), [`83d5942`](https://github.com/mastra-ai/mastra/commit/83d5942669ce7bba4a6ca4fd4da697a10eb5ebdc), [`b7959e6`](https://github.com/mastra-ai/mastra/commit/b7959e6e25a46b480f9ea2217c4c6c588c423791), [`bda6370`](https://github.com/mastra-ai/mastra/commit/bda637009360649aaf579919e7873e33553c273e), [`d7acd8e`](https://github.com/mastra-ai/mastra/commit/d7acd8e987b5d7eff4fd98b0906c17c06a2e83d5), [`c7f1f7d`](https://github.com/mastra-ai/mastra/commit/c7f1f7d24f61f247f018cc2d1f33bf63212959a7), [`0bddc6d`](https://github.com/mastra-ai/mastra/commit/0bddc6d8dbd6f6008c0cba2e4960a2da75a55af1), [`735d8c1`](https://github.com/mastra-ai/mastra/commit/735d8c1c0d19fbc09e6f8b66cf41bc7655993838), [`acf322e`](https://github.com/mastra-ai/mastra/commit/acf322e0f1fd0189684cf529d91c694bea918a45), [`c942802`](https://github.com/mastra-ai/mastra/commit/c942802a477a925b01859a7b8688d4355715caaa), [`a0c8c1b`](https://github.com/mastra-ai/mastra/commit/a0c8c1b87d4fee252aebda73e8637fbe01d761c9), 
[`cc34739`](https://github.com/mastra-ai/mastra/commit/cc34739c34b6266a91bea561119240a7acf47887), [`c218bd3`](https://github.com/mastra-ai/mastra/commit/c218bd3759e32423735b04843a09404572631014), [`2c4438b`](https://github.com/mastra-ai/mastra/commit/2c4438b87817ab7eed818c7990fef010475af1a3), [`2b8893c`](https://github.com/mastra-ai/mastra/commit/2b8893cb108ef9acb72ee7835cd625610d2c1a4a), [`8e5c75b`](https://github.com/mastra-ai/mastra/commit/8e5c75bdb1d08a42d45309a4c72def4b6890230f), [`e59e0d3`](https://github.com/mastra-ai/mastra/commit/e59e0d32afb5fcf2c9f3c00c8f81f6c21d3a63fa), [`fa8409b`](https://github.com/mastra-ai/mastra/commit/fa8409bc39cfd8ba6643b9db5269b90b22e2a2f7), [`173c535`](https://github.com/mastra-ai/mastra/commit/173c535c0645b0da404fe09f003778f0b0d4e019)]:
+ - @mastra/core@1.0.0-beta.0
 
  ## 0.11.10
 
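For orientation before the file-by-file diffs: the package's public surface (the `ElevenLabsVoice` class with its `getSpeakers`, `speak`, and `listen` methods) is unchanged in this release. Below is a minimal usage sketch, based only on the source embedded in the diffs that follow; it assumes `ELEVENLABS_API_KEY` is set in the environment.

```ts
import { ElevenLabsVoice } from '@mastra/voice-elevenlabs';

// Model names and the default speaker ID come from the bundled source below.
const voice = new ElevenLabsVoice({
  speechModel: { name: 'eleven_multilingual_v2' },
  listeningModel: { name: 'scribe_v1' },
  speaker: '9BWtsMINqrJLrRacOk9x', // Aria, the library default
});

const speakers = await voice.getSpeakers(); // [{ voiceId, name, language, gender }, ...]
const audio = await voice.speak('Hello from Mastra'); // NodeJS.ReadableStream of generated speech
const text = await voice.listen(audio, { filetype: 'mp3' }); // transcribed text via speech-to-text
```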
package/dist/index.cjs CHANGED
@@ -48,16 +48,13 @@ var ElevenLabsVoice = class extends voice.MastraVoice {
  * A promise that resolves to an array of speaker objects.
  */
  async getSpeakers() {
- const res = await this.traced(async () => {
- const voices = await this.client.voices.getAll();
- return voices?.voices?.map((voice) => ({
- voiceId: voice.voice_id,
- name: voice.name,
- language: voice.labels?.language || "en",
- gender: voice.labels?.gender || "neutral"
- })) ?? [];
- }, "voice.elevenlabs.voices")();
- return res;
+ const voices = await this.client.voices.getAll();
+ return voices?.voices?.map((voice) => ({
+ voiceId: voice.voice_id,
+ name: voice.name,
+ language: voice.labels?.language || "en",
+ gender: voice.labels?.gender || "neutral"
+ })) ?? [];
  }
  async streamToString(stream) {
  const chunks = [];
@@ -90,15 +87,12 @@ var ElevenLabsVoice = class extends voice.MastraVoice {
  throw new Error("No speech model specified");
  }
  const text = typeof input === "string" ? input : await this.streamToString(input);
- const res = await this.traced(async () => {
- return await this.client.generate({
- text,
- voice: speaker,
- model_id: this.speechModel?.name,
- stream: true
- });
- }, "voice.elevenlabs.speak")();
- return res;
+ return await this.client.generate({
+ text,
+ voice: speaker,
+ model_id: this.speechModel?.name,
+ stream: true
+ });
  }
  /**
  * Checks if listening capabilities are enabled.
@@ -125,31 +119,28 @@ var ElevenLabsVoice = class extends voice.MastraVoice {
  *
  */
  async listen(input, options) {
- const res = await this.traced(async () => {
- const chunks = [];
- for await (const chunk of input) {
- if (typeof chunk === "string") {
- chunks.push(Buffer.from(chunk));
- } else {
- chunks.push(chunk);
- }
+ const chunks = [];
+ for await (const chunk of input) {
+ if (typeof chunk === "string") {
+ chunks.push(Buffer.from(chunk));
+ } else {
+ chunks.push(chunk);
  }
- const buffer = Buffer.concat(chunks);
- const { language_code, tag_audio_events, num_speakers, filetype, ...requestOptions } = options || {};
- const file = new File([buffer], `audio.${filetype || "mp3"}`);
- const transcription = await this.client.speechToText.convert(
- {
- file,
- model_id: this.listeningModel?.name,
- language_code,
- tag_audio_events,
- num_speakers
- },
- requestOptions
- );
- return transcription.text;
- }, "voice.elevenlabs.listen")();
- return res;
+ }
+ const buffer = Buffer.concat(chunks);
+ const { language_code, tag_audio_events, num_speakers, filetype, ...requestOptions } = options || {};
+ const file = new File([buffer], `audio.${filetype || "mp3"}`);
+ const transcription = await this.client.speechToText.convert(
+ {
+ file,
+ model_id: this.listeningModel?.name,
+ language_code,
+ tag_audio_events,
+ num_speakers
+ },
+ requestOptions
+ );
+ return transcription.text;
  }
  };
 
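The hunks above (mirrored for the ESM build in `package/dist/index.js` below) drop the `this.traced(...)` wrappers, so the provider no longer emits its own `voice.elevenlabs.*` spans; this matches the changelog entry about removing the old OpenTelemetry-based tracing. If an application still wants per-call spans, one hypothetical workaround is to wrap calls with `@opentelemetry/api` itself, assuming an OpenTelemetry SDK is already configured elsewhere:

```ts
import { trace } from '@opentelemetry/api';
import { ElevenLabsVoice } from '@mastra/voice-elevenlabs';

const tracer = trace.getTracer('voice-elevenlabs-consumer');
const voice = new ElevenLabsVoice();

// Re-create a span comparable to the removed 'voice.elevenlabs.speak' one.
const audio = await tracer.startActiveSpan('voice.elevenlabs.speak', async span => {
  try {
    return await voice.speak('Hello');
  } finally {
    span.end();
  }
});
```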
package/dist/index.cjs.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../src/index.ts"],"names":["MastraVoice","ElevenLabsClient"],"mappings":";;;;;;AAkCO,IAAM,eAAA,GAAN,cAA8BA,iBAAA,CAAY;AAAA,EACvC,MAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWR,WAAA,CAAY;AAAA,IACV,WAAA;AAAA,IACA,cAAA;AAAA,IACA;AAAA,GACF,GAAuG,EAAC,EAAG;AACzG,IAAA,MAAM,MAAA,GAAS,WAAA,EAAa,MAAA,IAAU,OAAA,CAAQ,GAAA,CAAI,kBAAA;AAClD,IAAA,KAAA,CAAM;AAAA,MACJ,WAAA,EAAa;AAAA,QACX,IAAA,EAAM,aAAa,IAAA,IAAQ,wBAAA;AAAA,QAC3B,QAAQ,WAAA,EAAa;AAAA,OACvB;AAAA,MACA,cAAA,EAAgB;AAAA,QACd,IAAA,EAAM,gBAAgB,IAAA,IAAQ,WAAA;AAAA,QAC9B,QAAQ,cAAA,EAAgB;AAAA,OAC1B;AAAA,MACA;AAAA,KACD,CAAA;AAED,IAAA,IAAI,CAAC,MAAA,EAAQ;AACX,MAAA,MAAM,IAAI,MAAM,+BAA+B,CAAA;AAAA,IACjD;AAEA,IAAA,IAAA,CAAK,MAAA,GAAS,IAAIC,2BAAA,CAAiB;AAAA,MACjC;AAAA,KACD,CAAA;AAED,IAAA,IAAA,CAAK,UAAU,OAAA,IAAW,sBAAA;AAAA,EAC5B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,MAAM,WAAA,GAAc;AAClB,IAAA,MAAM,GAAA,GAAM,MAAM,IAAA,CAAK,MAAA,CAAO,YAAY;AACxC,MAAA,MAAM,MAAA,GAAS,MAAM,IAAA,CAAK,MAAA,CAAO,OAAO,MAAA,EAAO;AAC/C,MAAA,OACE,MAAA,EAAQ,MAAA,EAAQ,GAAA,CAAI,CAAA,KAAA,MAAU;AAAA,QAC5B,SAAS,KAAA,CAAM,QAAA;AAAA,QACf,MAAM,KAAA,CAAM,IAAA;AAAA,QACZ,QAAA,EAAU,KAAA,CAAM,MAAA,EAAQ,QAAA,IAAY,IAAA;AAAA,QACpC,MAAA,EAAQ,KAAA,CAAM,MAAA,EAAQ,MAAA,IAAU;AAAA,OAClC,CAAE,KAAK,EAAC;AAAA,IAEZ,CAAA,EAAG,yBAAyB,CAAA,EAAE;AAE9B,IAAA,OAAO,GAAA;AAAA,EACT;AAAA,EAEA,MAAc,eAAe,MAAA,EAAgD;AAC3E,IAAA,MAAM,SAAmB,EAAC;AAC1B,IAAA,WAAA,MAAiB,SAAS,MAAA,EAAQ;AAChC,MAAA,IAAI,OAAO,UAAU,QAAA,EAAU;AAC7B,QAAA,MAAA,CAAO,IAAA,CAAK,MAAA,CAAO,IAAA,CAAK,KAAK,CAAC,CAAA;AAAA,MAChC,CAAA,MAAO;AACL,QAAA,MAAA,CAAO,KAAK,KAAK,CAAA;AAAA,MACnB;AAAA,IACF;AACA,IAAA,OAAO,MAAA,CAAO,MAAA,CAAO,MAAM,CAAA,CAAE,SAAS,OAAO,CAAA;AAAA,EAC/C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,MAAM,KAAA,CAAM,KAAA,EAAuC,OAAA,EAAgE;AACjH,IAAA,MAAM,OAAA,GAAU,OAAA,EAAS,OAAA,IAAW,IAAA,CAAK,OAAA;AACzC,IAAA,IAAI,CAAC,OAAA,EAAS;AACZ,MAAA,MAAM,IAAI,MAAM,sBAAsB,CAAA;AAAA,IACxC;AAEA,IAAA,IAAI,CAAC,IAAA,CAAK,WAAA,EAAa,IAAA,EAAM;AAC3B,MAAA,MAAM,IAAI,MAAM,2BAA2B,CAAA;AAAA,IAC7C;AACA,IAAA,MAAM,IAAA,GAAO,OAAO,KAAA,KAAU,QAAA,GAAW,QAAQ,MAAM,IAAA,CAAK,eAAe,KAAK,CAAA;AAChF,IAAA,MAAM,GAAA,GAAM,MAAM,IAAA,CAAK,MAAA,CAAO,YAAY;AACxC,MAAA,OAAO,MAAM,IAAA,CAAK,MAAA,CAAO,QAAA,CAAS;AAAA,QAChC,IAAA;AAAA,QACA,KAAA,EAAO,OAAA;AAAA,QACP,QAAA,EAAU,KAAK,WAAA,EAAa,IAAA;AAAA,QAC5B,MAAA,EAAQ;AAAA,OACT,CAAA;AAAA,IACH,CAAA,EAAG,wBAAwB,CAAA,EAAE;AAE7B,IAAA,OAAO,GAAA;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAM,WAAA,GAAc;AAClB,IAAA,OAAO,EAAE,SAAS,IAAA,EAAK;AAAA,EACzB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAkBA,MAAM,MAAA,CAAO,KAAA,EAA8B,OAAA,EAAoD;AAC7F,IAAA,MAAM,GAAA,GAAM,MAAM,IAAA,CAAK,MAAA,CAAO,YAAY;AACxC,MAAA,MAAM,SAAmB,EAAC;AAC1B,MAAA,WAAA,MAAiB,SAAS,KAAA,EAAO;AAC/B,QAAA,IAAI,OAAO,UAAU,QAAA,EAAU;AAC7B,UAAA,MAAA,CAAO,IAAA,CAAK,MAAA,CAAO,IAAA,CAAK,KAAK,CAAC,CAAA;AAAA,QAChC,CAAA,MAAO;AACL,UAAA,MAAA,CAAO,KAAK,KAAK,CAAA;AAAA,QACnB;AAAA,MACF;AACA,MAAA,MAAM,MAAA,GAAS,MAAA,CAAO,MAAA,CAAO,MAAM,CAAA;AAEnC,MAAA,MAAM,EAAE,eAAe,gBAAA,EAAkB,YAAA,EAAc,UAAU,GAAG,cAAA,EAAe,GAAI,OAAA,IAAW,EAAC;AAEnG,MAAA,MAAM,IAAA,GAAO,IAAI,IAAA,CAAK,CAAC,MAAM,CAAA,EAAG,CAAA,MAAA,EAAS,QAAA,IAAY,KAAK,CAAA,CAAE,CAAA;AAE5D,MAAA,MAAM,aAAA,GAAgB,MAAM,IAAA,CAAK,MAAA,CAAO,YAAA,CAAa,OAAA;AAAA,QACnD;AAAA,UACE,IAAA;AAAA,UACA,QAAA,EAAU,KAAK,cAAA,EAAgB,IAAA;AAAA,UAC/B,aAAA;AAAA,UACA,gBAAA;AAAA,UACA;AAAA,SACF;AAAA,QACA;AAAA,OACF;AAEA,MAAA,OAAO,aAAA,CAAc,IAAA;AAAA,IACvB,CAAA,EAAG,yBAAyB,CAAA,EAAE;AAE9B,IAAA,OAAO,GAAA;AAAA,EACT;AACF","file":"index.cjs","sourcesContent":["import { MastraVoice } from 
'@mastra/core/voice';\nimport { ElevenLabsClient } from 'elevenlabs';\n\ntype ElevenLabsModel =\n | 'eleven_multilingual_v2'\n | 'eleven_flash_v2_5'\n | 'eleven_flash_v2'\n | 'eleven_multilingual_sts_v2'\n | 'eleven_english_sts_v2'\n | 'scribe_v1';\n\ninterface ElevenLabsVoiceConfig {\n name?: ElevenLabsModel;\n apiKey?: string;\n}\n\ninterface SpeechToTextOptions {\n language_code?: string;\n tag_audio_events?: boolean;\n num_speakers?: number;\n filetype?: string;\n}\n\ninterface RequestOptions {\n timeoutInSeconds?: number;\n maxRetries?: number;\n abortSignal?: AbortSignal;\n apiKey?: string | undefined;\n headers?: Record<string, string>;\n}\n\n// Combined options type\ntype ElevenLabsListenOptions = SpeechToTextOptions & RequestOptions;\n\nexport class ElevenLabsVoice extends MastraVoice {\n private client: ElevenLabsClient;\n\n /**\n * Creates an instance of the ElevenLabsVoice class.\n *\n * @param {Object} options - The options for the voice configuration.\n * @param {ElevenLabsVoiceConfig} [options.speechModel] - The configuration for the speech model, including the model name and API key.\n * @param {string} [options.speaker] - The ID of the speaker to use. If not provided, a default speaker will be used.\n *\n * @throws {Error} If the ELEVENLABS_API_KEY is not set in the environment variables.\n */\n constructor({\n speechModel,\n listeningModel,\n speaker,\n }: { speechModel?: ElevenLabsVoiceConfig; listeningModel?: ElevenLabsVoiceConfig; speaker?: string } = {}) {\n const apiKey = speechModel?.apiKey ?? process.env.ELEVENLABS_API_KEY;\n super({\n speechModel: {\n name: speechModel?.name ?? 'eleven_multilingual_v2',\n apiKey: speechModel?.apiKey,\n },\n listeningModel: {\n name: listeningModel?.name ?? 'scribe_v1',\n apiKey: listeningModel?.apiKey,\n },\n speaker,\n });\n\n if (!apiKey) {\n throw new Error('ELEVENLABS_API_KEY is not set');\n }\n\n this.client = new ElevenLabsClient({\n apiKey,\n });\n\n this.speaker = speaker || '9BWtsMINqrJLrRacOk9x'; // Aria is the default speaker\n }\n\n /**\n * Retrieves a list of available speakers from the Eleven Labs API.\n * Each speaker includes their ID, name, language, and gender.\n *\n * @returns {Promise<Array<{ voiceId: string, name: string, language: string, gender: string }>>}\n * A promise that resolves to an array of speaker objects.\n */\n async getSpeakers() {\n const res = await this.traced(async () => {\n const voices = await this.client.voices.getAll();\n return (\n voices?.voices?.map(voice => ({\n voiceId: voice.voice_id,\n name: voice.name,\n language: voice.labels?.language || 'en',\n gender: voice.labels?.gender || 'neutral',\n })) ?? []\n );\n }, 'voice.elevenlabs.voices')();\n\n return res;\n }\n\n private async streamToString(stream: NodeJS.ReadableStream): Promise<string> {\n const chunks: Buffer[] = [];\n for await (const chunk of stream) {\n if (typeof chunk === 'string') {\n chunks.push(Buffer.from(chunk));\n } else {\n chunks.push(chunk);\n }\n }\n return Buffer.concat(chunks).toString('utf-8');\n }\n\n /**\n * Converts text or audio input into speech using the Eleven Labs API.\n *\n * @param {string | NodeJS.ReadableStream} input - The text to be converted to speech or a stream containing audio data.\n * @param {Object} [options] - Optional parameters for the speech generation.\n * @param {string} [options.speaker] - The ID of the speaker to use for the speech. 
If not provided, the default speaker will be used.\n *\n * @returns {Promise<NodeJS.ReadableStream>} A promise that resolves to a readable stream of the generated speech.\n *\n * @throws {Error} If no speaker is specified or if no speech model is set.\n */\n async speak(input: string | NodeJS.ReadableStream, options?: { speaker?: string }): Promise<NodeJS.ReadableStream> {\n const speaker = options?.speaker || this.speaker;\n if (!speaker) {\n throw new Error('No speaker specified');\n }\n\n if (!this.speechModel?.name) {\n throw new Error('No speech model specified');\n }\n const text = typeof input === 'string' ? input : await this.streamToString(input);\n const res = await this.traced(async () => {\n return await this.client.generate({\n text,\n voice: speaker,\n model_id: this.speechModel?.name as ElevenLabsModel,\n stream: true,\n });\n }, 'voice.elevenlabs.speak')();\n\n return res;\n }\n\n /**\n * Checks if listening capabilities are enabled.\n *\n * @returns {Promise<{ enabled: boolean }>}\n */\n async getListener() {\n return { enabled: true };\n }\n\n /**\n * Converts audio input to text using ElevenLabs Speech-to-Text API.\n *\n * @param input - A readable stream containing the audio data to transcribe\n * @param options - Configuration options for the transcription\n * @param options.language_code - ISO language code (e.g., 'en', 'fr', 'es')\n * @param options.tag_audio_events - Whether to tag audio events like [MUSIC], [LAUGHTER], etc.\n * @param options.num_speakers - Number of speakers to detect in the audio\n * @param options.filetype - Audio file format (e.g., 'mp3', 'wav', 'ogg')\n * @param options.timeoutInSeconds - Request timeout in seconds\n * @param options.maxRetries - Maximum number of retry attempts\n * @param options.abortSignal - Signal to abort the request\n *\n * @returns A Promise that resolves to the transcribed text\n *\n */\n async listen(input: NodeJS.ReadableStream, options?: ElevenLabsListenOptions): Promise<string> {\n const res = await this.traced(async () => {\n const chunks: Buffer[] = [];\n for await (const chunk of input) {\n if (typeof chunk === 'string') {\n chunks.push(Buffer.from(chunk));\n } else {\n chunks.push(chunk);\n }\n }\n const buffer = Buffer.concat(chunks);\n\n const { language_code, tag_audio_events, num_speakers, filetype, ...requestOptions } = options || {};\n\n const file = new File([buffer], `audio.${filetype || 'mp3'}`);\n\n const transcription = await this.client.speechToText.convert(\n {\n file: file,\n model_id: this.listeningModel?.name as ElevenLabsModel,\n language_code,\n tag_audio_events,\n num_speakers,\n },\n requestOptions,\n );\n\n return transcription.text;\n }, 'voice.elevenlabs.listen')();\n\n return res;\n }\n}\n"]}
+ {"version":3,"sources":["../src/index.ts"],"names":["MastraVoice","ElevenLabsClient"],"mappings":";;;;;;AAkCO,IAAM,eAAA,GAAN,cAA8BA,iBAAA,CAAY;AAAA,EACvC,MAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWR,WAAA,CAAY;AAAA,IACV,WAAA;AAAA,IACA,cAAA;AAAA,IACA;AAAA,GACF,GAAuG,EAAC,EAAG;AACzG,IAAA,MAAM,MAAA,GAAS,WAAA,EAAa,MAAA,IAAU,OAAA,CAAQ,GAAA,CAAI,kBAAA;AAClD,IAAA,KAAA,CAAM;AAAA,MACJ,WAAA,EAAa;AAAA,QACX,IAAA,EAAM,aAAa,IAAA,IAAQ,wBAAA;AAAA,QAC3B,QAAQ,WAAA,EAAa;AAAA,OACvB;AAAA,MACA,cAAA,EAAgB;AAAA,QACd,IAAA,EAAM,gBAAgB,IAAA,IAAQ,WAAA;AAAA,QAC9B,QAAQ,cAAA,EAAgB;AAAA,OAC1B;AAAA,MACA;AAAA,KACD,CAAA;AAED,IAAA,IAAI,CAAC,MAAA,EAAQ;AACX,MAAA,MAAM,IAAI,MAAM,+BAA+B,CAAA;AAAA,IACjD;AAEA,IAAA,IAAA,CAAK,MAAA,GAAS,IAAIC,2BAAA,CAAiB;AAAA,MACjC;AAAA,KACD,CAAA;AAED,IAAA,IAAA,CAAK,UAAU,OAAA,IAAW,sBAAA;AAAA,EAC5B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,MAAM,WAAA,GAAc;AAClB,IAAA,MAAM,MAAA,GAAS,MAAM,IAAA,CAAK,MAAA,CAAO,OAAO,MAAA,EAAO;AAC/C,IAAA,OACE,MAAA,EAAQ,MAAA,EAAQ,GAAA,CAAI,CAAA,KAAA,MAAU;AAAA,MAC5B,SAAS,KAAA,CAAM,QAAA;AAAA,MACf,MAAM,KAAA,CAAM,IAAA;AAAA,MACZ,QAAA,EAAU,KAAA,CAAM,MAAA,EAAQ,QAAA,IAAY,IAAA;AAAA,MACpC,MAAA,EAAQ,KAAA,CAAM,MAAA,EAAQ,MAAA,IAAU;AAAA,KAClC,CAAE,KAAK,EAAC;AAAA,EAEZ;AAAA,EAEA,MAAc,eAAe,MAAA,EAAgD;AAC3E,IAAA,MAAM,SAAmB,EAAC;AAC1B,IAAA,WAAA,MAAiB,SAAS,MAAA,EAAQ;AAChC,MAAA,IAAI,OAAO,UAAU,QAAA,EAAU;AAC7B,QAAA,MAAA,CAAO,IAAA,CAAK,MAAA,CAAO,IAAA,CAAK,KAAK,CAAC,CAAA;AAAA,MAChC,CAAA,MAAO;AACL,QAAA,MAAA,CAAO,KAAK,KAAK,CAAA;AAAA,MACnB;AAAA,IACF;AACA,IAAA,OAAO,MAAA,CAAO,MAAA,CAAO,MAAM,CAAA,CAAE,SAAS,OAAO,CAAA;AAAA,EAC/C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,MAAM,KAAA,CAAM,KAAA,EAAuC,OAAA,EAAgE;AACjH,IAAA,MAAM,OAAA,GAAU,OAAA,EAAS,OAAA,IAAW,IAAA,CAAK,OAAA;AACzC,IAAA,IAAI,CAAC,OAAA,EAAS;AACZ,MAAA,MAAM,IAAI,MAAM,sBAAsB,CAAA;AAAA,IACxC;AAEA,IAAA,IAAI,CAAC,IAAA,CAAK,WAAA,EAAa,IAAA,EAAM;AAC3B,MAAA,MAAM,IAAI,MAAM,2BAA2B,CAAA;AAAA,IAC7C;AACA,IAAA,MAAM,IAAA,GAAO,OAAO,KAAA,KAAU,QAAA,GAAW,QAAQ,MAAM,IAAA,CAAK,eAAe,KAAK,CAAA;AAEhF,IAAA,OAAO,MAAM,IAAA,CAAK,MAAA,CAAO,QAAA,CAAS;AAAA,MAChC,IAAA;AAAA,MACA,KAAA,EAAO,OAAA;AAAA,MACP,QAAA,EAAU,KAAK,WAAA,EAAa,IAAA;AAAA,MAC5B,MAAA,EAAQ;AAAA,KACT,CAAA;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAM,WAAA,GAAc;AAClB,IAAA,OAAO,EAAE,SAAS,IAAA,EAAK;AAAA,EACzB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAkBA,MAAM,MAAA,CAAO,KAAA,EAA8B,OAAA,EAAoD;AAC7F,IAAA,MAAM,SAAmB,EAAC;AAC1B,IAAA,WAAA,MAAiB,SAAS,KAAA,EAAO;AAC/B,MAAA,IAAI,OAAO,UAAU,QAAA,EAAU;AAC7B,QAAA,MAAA,CAAO,IAAA,CAAK,MAAA,CAAO,IAAA,CAAK,KAAK,CAAC,CAAA;AAAA,MAChC,CAAA,MAAO;AACL,QAAA,MAAA,CAAO,KAAK,KAAK,CAAA;AAAA,MACnB;AAAA,IACF;AACA,IAAA,MAAM,MAAA,GAAS,MAAA,CAAO,MAAA,CAAO,MAAM,CAAA;AAEnC,IAAA,MAAM,EAAE,eAAe,gBAAA,EAAkB,YAAA,EAAc,UAAU,GAAG,cAAA,EAAe,GAAI,OAAA,IAAW,EAAC;AAEnG,IAAA,MAAM,IAAA,GAAO,IAAI,IAAA,CAAK,CAAC,MAAM,CAAA,EAAG,CAAA,MAAA,EAAS,QAAA,IAAY,KAAK,CAAA,CAAE,CAAA;AAE5D,IAAA,MAAM,aAAA,GAAgB,MAAM,IAAA,CAAK,MAAA,CAAO,YAAA,CAAa,OAAA;AAAA,MACnD;AAAA,QACE,IAAA;AAAA,QACA,QAAA,EAAU,KAAK,cAAA,EAAgB,IAAA;AAAA,QAC/B,aAAA;AAAA,QACA,gBAAA;AAAA,QACA;AAAA,OACF;AAAA,MACA;AAAA,KACF;AAEA,IAAA,OAAO,aAAA,CAAc,IAAA;AAAA,EACvB;AACF","file":"index.cjs","sourcesContent":["import { MastraVoice } from '@mastra/core/voice';\nimport { ElevenLabsClient } from 'elevenlabs';\n\ntype ElevenLabsModel =\n | 'eleven_multilingual_v2'\n | 'eleven_flash_v2_5'\n | 'eleven_flash_v2'\n | 'eleven_multilingual_sts_v2'\n | 'eleven_english_sts_v2'\n | 'scribe_v1';\n\ninterface ElevenLabsVoiceConfig {\n name?: ElevenLabsModel;\n apiKey?: string;\n}\n\ninterface 
SpeechToTextOptions {\n language_code?: string;\n tag_audio_events?: boolean;\n num_speakers?: number;\n filetype?: string;\n}\n\ninterface RequestOptions {\n timeoutInSeconds?: number;\n maxRetries?: number;\n abortSignal?: AbortSignal;\n apiKey?: string | undefined;\n headers?: Record<string, string>;\n}\n\n// Combined options type\ntype ElevenLabsListenOptions = SpeechToTextOptions & RequestOptions;\n\nexport class ElevenLabsVoice extends MastraVoice {\n private client: ElevenLabsClient;\n\n /**\n * Creates an instance of the ElevenLabsVoice class.\n *\n * @param {Object} options - The options for the voice configuration.\n * @param {ElevenLabsVoiceConfig} [options.speechModel] - The configuration for the speech model, including the model name and API key.\n * @param {string} [options.speaker] - The ID of the speaker to use. If not provided, a default speaker will be used.\n *\n * @throws {Error} If the ELEVENLABS_API_KEY is not set in the environment variables.\n */\n constructor({\n speechModel,\n listeningModel,\n speaker,\n }: { speechModel?: ElevenLabsVoiceConfig; listeningModel?: ElevenLabsVoiceConfig; speaker?: string } = {}) {\n const apiKey = speechModel?.apiKey ?? process.env.ELEVENLABS_API_KEY;\n super({\n speechModel: {\n name: speechModel?.name ?? 'eleven_multilingual_v2',\n apiKey: speechModel?.apiKey,\n },\n listeningModel: {\n name: listeningModel?.name ?? 'scribe_v1',\n apiKey: listeningModel?.apiKey,\n },\n speaker,\n });\n\n if (!apiKey) {\n throw new Error('ELEVENLABS_API_KEY is not set');\n }\n\n this.client = new ElevenLabsClient({\n apiKey,\n });\n\n this.speaker = speaker || '9BWtsMINqrJLrRacOk9x'; // Aria is the default speaker\n }\n\n /**\n * Retrieves a list of available speakers from the Eleven Labs API.\n * Each speaker includes their ID, name, language, and gender.\n *\n * @returns {Promise<Array<{ voiceId: string, name: string, language: string, gender: string }>>}\n * A promise that resolves to an array of speaker objects.\n */\n async getSpeakers() {\n const voices = await this.client.voices.getAll();\n return (\n voices?.voices?.map(voice => ({\n voiceId: voice.voice_id,\n name: voice.name,\n language: voice.labels?.language || 'en',\n gender: voice.labels?.gender || 'neutral',\n })) ?? []\n );\n }\n\n private async streamToString(stream: NodeJS.ReadableStream): Promise<string> {\n const chunks: Buffer[] = [];\n for await (const chunk of stream) {\n if (typeof chunk === 'string') {\n chunks.push(Buffer.from(chunk));\n } else {\n chunks.push(chunk);\n }\n }\n return Buffer.concat(chunks).toString('utf-8');\n }\n\n /**\n * Converts text or audio input into speech using the Eleven Labs API.\n *\n * @param {string | NodeJS.ReadableStream} input - The text to be converted to speech or a stream containing audio data.\n * @param {Object} [options] - Optional parameters for the speech generation.\n * @param {string} [options.speaker] - The ID of the speaker to use for the speech. 
If not provided, the default speaker will be used.\n *\n * @returns {Promise<NodeJS.ReadableStream>} A promise that resolves to a readable stream of the generated speech.\n *\n * @throws {Error} If no speaker is specified or if no speech model is set.\n */\n async speak(input: string | NodeJS.ReadableStream, options?: { speaker?: string }): Promise<NodeJS.ReadableStream> {\n const speaker = options?.speaker || this.speaker;\n if (!speaker) {\n throw new Error('No speaker specified');\n }\n\n if (!this.speechModel?.name) {\n throw new Error('No speech model specified');\n }\n const text = typeof input === 'string' ? input : await this.streamToString(input);\n\n return await this.client.generate({\n text,\n voice: speaker,\n model_id: this.speechModel?.name as ElevenLabsModel,\n stream: true,\n });\n }\n\n /**\n * Checks if listening capabilities are enabled.\n *\n * @returns {Promise<{ enabled: boolean }>}\n */\n async getListener() {\n return { enabled: true };\n }\n\n /**\n * Converts audio input to text using ElevenLabs Speech-to-Text API.\n *\n * @param input - A readable stream containing the audio data to transcribe\n * @param options - Configuration options for the transcription\n * @param options.language_code - ISO language code (e.g., 'en', 'fr', 'es')\n * @param options.tag_audio_events - Whether to tag audio events like [MUSIC], [LAUGHTER], etc.\n * @param options.num_speakers - Number of speakers to detect in the audio\n * @param options.filetype - Audio file format (e.g., 'mp3', 'wav', 'ogg')\n * @param options.timeoutInSeconds - Request timeout in seconds\n * @param options.maxRetries - Maximum number of retry attempts\n * @param options.abortSignal - Signal to abort the request\n *\n * @returns A Promise that resolves to the transcribed text\n *\n */\n async listen(input: NodeJS.ReadableStream, options?: ElevenLabsListenOptions): Promise<string> {\n const chunks: Buffer[] = [];\n for await (const chunk of input) {\n if (typeof chunk === 'string') {\n chunks.push(Buffer.from(chunk));\n } else {\n chunks.push(chunk);\n }\n }\n const buffer = Buffer.concat(chunks);\n\n const { language_code, tag_audio_events, num_speakers, filetype, ...requestOptions } = options || {};\n\n const file = new File([buffer], `audio.${filetype || 'mp3'}`);\n\n const transcription = await this.client.speechToText.convert(\n {\n file: file,\n model_id: this.listeningModel?.name as ElevenLabsModel,\n language_code,\n tag_audio_events,\n num_speakers,\n },\n requestOptions,\n );\n\n return transcription.text;\n }\n}\n"]}
package/dist/index.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,WAAW,EAAE,MAAM,oBAAoB,CAAC;AAGjD,KAAK,eAAe,GAChB,wBAAwB,GACxB,mBAAmB,GACnB,iBAAiB,GACjB,4BAA4B,GAC5B,uBAAuB,GACvB,WAAW,CAAC;AAEhB,UAAU,qBAAqB;IAC7B,IAAI,CAAC,EAAE,eAAe,CAAC;IACvB,MAAM,CAAC,EAAE,MAAM,CAAC;CACjB;AAED,UAAU,mBAAmB;IAC3B,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB,gBAAgB,CAAC,EAAE,OAAO,CAAC;IAC3B,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,QAAQ,CAAC,EAAE,MAAM,CAAC;CACnB;AAED,UAAU,cAAc;IACtB,gBAAgB,CAAC,EAAE,MAAM,CAAC;IAC1B,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,WAAW,CAAC,EAAE,WAAW,CAAC;IAC1B,MAAM,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IAC5B,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;CAClC;AAGD,KAAK,uBAAuB,GAAG,mBAAmB,GAAG,cAAc,CAAC;AAEpE,qBAAa,eAAgB,SAAQ,WAAW;IAC9C,OAAO,CAAC,MAAM,CAAmB;IAEjC;;;;;;;;OAQG;gBACS,EACV,WAAW,EACX,cAAc,EACd,OAAO,GACR,GAAE;QAAE,WAAW,CAAC,EAAE,qBAAqB,CAAC;QAAC,cAAc,CAAC,EAAE,qBAAqB,CAAC;QAAC,OAAO,CAAC,EAAE,MAAM,CAAA;KAAO;IAyBzG;;;;;;OAMG;IACG,WAAW;;;;;;YAgBH,cAAc;IAY5B;;;;;;;;;;OAUG;IACG,KAAK,CAAC,KAAK,EAAE,MAAM,GAAG,MAAM,CAAC,cAAc,EAAE,OAAO,CAAC,EAAE;QAAE,OAAO,CAAC,EAAE,MAAM,CAAA;KAAE,GAAG,OAAO,CAAC,MAAM,CAAC,cAAc,CAAC;IAsBlH;;;;OAIG;IACG,WAAW;;;IAIjB;;;;;;;;;;;;;;;OAeG;IACG,MAAM,CAAC,KAAK,EAAE,MAAM,CAAC,cAAc,EAAE,OAAO,CAAC,EAAE,uBAAuB,GAAG,OAAO,CAAC,MAAM,CAAC;CAgC/F"}
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,WAAW,EAAE,MAAM,oBAAoB,CAAC;AAGjD,KAAK,eAAe,GAChB,wBAAwB,GACxB,mBAAmB,GACnB,iBAAiB,GACjB,4BAA4B,GAC5B,uBAAuB,GACvB,WAAW,CAAC;AAEhB,UAAU,qBAAqB;IAC7B,IAAI,CAAC,EAAE,eAAe,CAAC;IACvB,MAAM,CAAC,EAAE,MAAM,CAAC;CACjB;AAED,UAAU,mBAAmB;IAC3B,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB,gBAAgB,CAAC,EAAE,OAAO,CAAC;IAC3B,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,QAAQ,CAAC,EAAE,MAAM,CAAC;CACnB;AAED,UAAU,cAAc;IACtB,gBAAgB,CAAC,EAAE,MAAM,CAAC;IAC1B,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,WAAW,CAAC,EAAE,WAAW,CAAC;IAC1B,MAAM,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IAC5B,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;CAClC;AAGD,KAAK,uBAAuB,GAAG,mBAAmB,GAAG,cAAc,CAAC;AAEpE,qBAAa,eAAgB,SAAQ,WAAW;IAC9C,OAAO,CAAC,MAAM,CAAmB;IAEjC;;;;;;;;OAQG;gBACS,EACV,WAAW,EACX,cAAc,EACd,OAAO,GACR,GAAE;QAAE,WAAW,CAAC,EAAE,qBAAqB,CAAC;QAAC,cAAc,CAAC,EAAE,qBAAqB,CAAC;QAAC,OAAO,CAAC,EAAE,MAAM,CAAA;KAAO;IAyBzG;;;;;;OAMG;IACG,WAAW;;;;;;YAYH,cAAc;IAY5B;;;;;;;;;;OAUG;IACG,KAAK,CAAC,KAAK,EAAE,MAAM,GAAG,MAAM,CAAC,cAAc,EAAE,OAAO,CAAC,EAAE;QAAE,OAAO,CAAC,EAAE,MAAM,CAAA;KAAE,GAAG,OAAO,CAAC,MAAM,CAAC,cAAc,CAAC;IAmBlH;;;;OAIG;IACG,WAAW;;;IAIjB;;;;;;;;;;;;;;;OAeG;IACG,MAAM,CAAC,KAAK,EAAE,MAAM,CAAC,cAAc,EAAE,OAAO,CAAC,EAAE,uBAAuB,GAAG,OAAO,CAAC,MAAM,CAAC;CA4B/F"}
package/dist/index.js CHANGED
@@ -46,16 +46,13 @@ var ElevenLabsVoice = class extends MastraVoice {
  * A promise that resolves to an array of speaker objects.
  */
  async getSpeakers() {
- const res = await this.traced(async () => {
- const voices = await this.client.voices.getAll();
- return voices?.voices?.map((voice) => ({
- voiceId: voice.voice_id,
- name: voice.name,
- language: voice.labels?.language || "en",
- gender: voice.labels?.gender || "neutral"
- })) ?? [];
- }, "voice.elevenlabs.voices")();
- return res;
+ const voices = await this.client.voices.getAll();
+ return voices?.voices?.map((voice) => ({
+ voiceId: voice.voice_id,
+ name: voice.name,
+ language: voice.labels?.language || "en",
+ gender: voice.labels?.gender || "neutral"
+ })) ?? [];
  }
  async streamToString(stream) {
  const chunks = [];
@@ -88,15 +85,12 @@ var ElevenLabsVoice = class extends MastraVoice {
  throw new Error("No speech model specified");
  }
  const text = typeof input === "string" ? input : await this.streamToString(input);
- const res = await this.traced(async () => {
- return await this.client.generate({
- text,
- voice: speaker,
- model_id: this.speechModel?.name,
- stream: true
- });
- }, "voice.elevenlabs.speak")();
- return res;
+ return await this.client.generate({
+ text,
+ voice: speaker,
+ model_id: this.speechModel?.name,
+ stream: true
+ });
  }
  /**
  * Checks if listening capabilities are enabled.
@@ -123,31 +117,28 @@ var ElevenLabsVoice = class extends MastraVoice {
  *
  */
  async listen(input, options) {
- const res = await this.traced(async () => {
- const chunks = [];
- for await (const chunk of input) {
- if (typeof chunk === "string") {
- chunks.push(Buffer.from(chunk));
- } else {
- chunks.push(chunk);
- }
+ const chunks = [];
+ for await (const chunk of input) {
+ if (typeof chunk === "string") {
+ chunks.push(Buffer.from(chunk));
+ } else {
+ chunks.push(chunk);
  }
- const buffer = Buffer.concat(chunks);
- const { language_code, tag_audio_events, num_speakers, filetype, ...requestOptions } = options || {};
- const file = new File([buffer], `audio.${filetype || "mp3"}`);
- const transcription = await this.client.speechToText.convert(
- {
- file,
- model_id: this.listeningModel?.name,
- language_code,
- tag_audio_events,
- num_speakers
- },
- requestOptions
- );
- return transcription.text;
- }, "voice.elevenlabs.listen")();
- return res;
+ }
+ const buffer = Buffer.concat(chunks);
+ const { language_code, tag_audio_events, num_speakers, filetype, ...requestOptions } = options || {};
+ const file = new File([buffer], `audio.${filetype || "mp3"}`);
+ const transcription = await this.client.speechToText.convert(
+ {
+ file,
+ model_id: this.listeningModel?.name,
+ language_code,
+ tag_audio_events,
+ num_speakers
+ },
+ requestOptions
+ );
+ return transcription.text;
  }
  };
 
package/dist/index.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../src/index.ts"],"names":[],"mappings":";;;;AAkCO,IAAM,eAAA,GAAN,cAA8B,WAAA,CAAY;AAAA,EACvC,MAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWR,WAAA,CAAY;AAAA,IACV,WAAA;AAAA,IACA,cAAA;AAAA,IACA;AAAA,GACF,GAAuG,EAAC,EAAG;AACzG,IAAA,MAAM,MAAA,GAAS,WAAA,EAAa,MAAA,IAAU,OAAA,CAAQ,GAAA,CAAI,kBAAA;AAClD,IAAA,KAAA,CAAM;AAAA,MACJ,WAAA,EAAa;AAAA,QACX,IAAA,EAAM,aAAa,IAAA,IAAQ,wBAAA;AAAA,QAC3B,QAAQ,WAAA,EAAa;AAAA,OACvB;AAAA,MACA,cAAA,EAAgB;AAAA,QACd,IAAA,EAAM,gBAAgB,IAAA,IAAQ,WAAA;AAAA,QAC9B,QAAQ,cAAA,EAAgB;AAAA,OAC1B;AAAA,MACA;AAAA,KACD,CAAA;AAED,IAAA,IAAI,CAAC,MAAA,EAAQ;AACX,MAAA,MAAM,IAAI,MAAM,+BAA+B,CAAA;AAAA,IACjD;AAEA,IAAA,IAAA,CAAK,MAAA,GAAS,IAAI,gBAAA,CAAiB;AAAA,MACjC;AAAA,KACD,CAAA;AAED,IAAA,IAAA,CAAK,UAAU,OAAA,IAAW,sBAAA;AAAA,EAC5B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,MAAM,WAAA,GAAc;AAClB,IAAA,MAAM,GAAA,GAAM,MAAM,IAAA,CAAK,MAAA,CAAO,YAAY;AACxC,MAAA,MAAM,MAAA,GAAS,MAAM,IAAA,CAAK,MAAA,CAAO,OAAO,MAAA,EAAO;AAC/C,MAAA,OACE,MAAA,EAAQ,MAAA,EAAQ,GAAA,CAAI,CAAA,KAAA,MAAU;AAAA,QAC5B,SAAS,KAAA,CAAM,QAAA;AAAA,QACf,MAAM,KAAA,CAAM,IAAA;AAAA,QACZ,QAAA,EAAU,KAAA,CAAM,MAAA,EAAQ,QAAA,IAAY,IAAA;AAAA,QACpC,MAAA,EAAQ,KAAA,CAAM,MAAA,EAAQ,MAAA,IAAU;AAAA,OAClC,CAAE,KAAK,EAAC;AAAA,IAEZ,CAAA,EAAG,yBAAyB,CAAA,EAAE;AAE9B,IAAA,OAAO,GAAA;AAAA,EACT;AAAA,EAEA,MAAc,eAAe,MAAA,EAAgD;AAC3E,IAAA,MAAM,SAAmB,EAAC;AAC1B,IAAA,WAAA,MAAiB,SAAS,MAAA,EAAQ;AAChC,MAAA,IAAI,OAAO,UAAU,QAAA,EAAU;AAC7B,QAAA,MAAA,CAAO,IAAA,CAAK,MAAA,CAAO,IAAA,CAAK,KAAK,CAAC,CAAA;AAAA,MAChC,CAAA,MAAO;AACL,QAAA,MAAA,CAAO,KAAK,KAAK,CAAA;AAAA,MACnB;AAAA,IACF;AACA,IAAA,OAAO,MAAA,CAAO,MAAA,CAAO,MAAM,CAAA,CAAE,SAAS,OAAO,CAAA;AAAA,EAC/C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,MAAM,KAAA,CAAM,KAAA,EAAuC,OAAA,EAAgE;AACjH,IAAA,MAAM,OAAA,GAAU,OAAA,EAAS,OAAA,IAAW,IAAA,CAAK,OAAA;AACzC,IAAA,IAAI,CAAC,OAAA,EAAS;AACZ,MAAA,MAAM,IAAI,MAAM,sBAAsB,CAAA;AAAA,IACxC;AAEA,IAAA,IAAI,CAAC,IAAA,CAAK,WAAA,EAAa,IAAA,EAAM;AAC3B,MAAA,MAAM,IAAI,MAAM,2BAA2B,CAAA;AAAA,IAC7C;AACA,IAAA,MAAM,IAAA,GAAO,OAAO,KAAA,KAAU,QAAA,GAAW,QAAQ,MAAM,IAAA,CAAK,eAAe,KAAK,CAAA;AAChF,IAAA,MAAM,GAAA,GAAM,MAAM,IAAA,CAAK,MAAA,CAAO,YAAY;AACxC,MAAA,OAAO,MAAM,IAAA,CAAK,MAAA,CAAO,QAAA,CAAS;AAAA,QAChC,IAAA;AAAA,QACA,KAAA,EAAO,OAAA;AAAA,QACP,QAAA,EAAU,KAAK,WAAA,EAAa,IAAA;AAAA,QAC5B,MAAA,EAAQ;AAAA,OACT,CAAA;AAAA,IACH,CAAA,EAAG,wBAAwB,CAAA,EAAE;AAE7B,IAAA,OAAO,GAAA;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAM,WAAA,GAAc;AAClB,IAAA,OAAO,EAAE,SAAS,IAAA,EAAK;AAAA,EACzB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAkBA,MAAM,MAAA,CAAO,KAAA,EAA8B,OAAA,EAAoD;AAC7F,IAAA,MAAM,GAAA,GAAM,MAAM,IAAA,CAAK,MAAA,CAAO,YAAY;AACxC,MAAA,MAAM,SAAmB,EAAC;AAC1B,MAAA,WAAA,MAAiB,SAAS,KAAA,EAAO;AAC/B,QAAA,IAAI,OAAO,UAAU,QAAA,EAAU;AAC7B,UAAA,MAAA,CAAO,IAAA,CAAK,MAAA,CAAO,IAAA,CAAK,KAAK,CAAC,CAAA;AAAA,QAChC,CAAA,MAAO;AACL,UAAA,MAAA,CAAO,KAAK,KAAK,CAAA;AAAA,QACnB;AAAA,MACF;AACA,MAAA,MAAM,MAAA,GAAS,MAAA,CAAO,MAAA,CAAO,MAAM,CAAA;AAEnC,MAAA,MAAM,EAAE,eAAe,gBAAA,EAAkB,YAAA,EAAc,UAAU,GAAG,cAAA,EAAe,GAAI,OAAA,IAAW,EAAC;AAEnG,MAAA,MAAM,IAAA,GAAO,IAAI,IAAA,CAAK,CAAC,MAAM,CAAA,EAAG,CAAA,MAAA,EAAS,QAAA,IAAY,KAAK,CAAA,CAAE,CAAA;AAE5D,MAAA,MAAM,aAAA,GAAgB,MAAM,IAAA,CAAK,MAAA,CAAO,YAAA,CAAa,OAAA;AAAA,QACnD;AAAA,UACE,IAAA;AAAA,UACA,QAAA,EAAU,KAAK,cAAA,EAAgB,IAAA;AAAA,UAC/B,aAAA;AAAA,UACA,gBAAA;AAAA,UACA;AAAA,SACF;AAAA,QACA;AAAA,OACF;AAEA,MAAA,OAAO,aAAA,CAAc,IAAA;AAAA,IACvB,CAAA,EAAG,yBAAyB,CAAA,EAAE;AAE9B,IAAA,OAAO,GAAA;AAAA,EACT;AACF","file":"index.js","sourcesContent":["import { MastraVoice } from '@mastra/core/voice';\nimport { ElevenLabsClient } from 
'elevenlabs';\n\ntype ElevenLabsModel =\n | 'eleven_multilingual_v2'\n | 'eleven_flash_v2_5'\n | 'eleven_flash_v2'\n | 'eleven_multilingual_sts_v2'\n | 'eleven_english_sts_v2'\n | 'scribe_v1';\n\ninterface ElevenLabsVoiceConfig {\n name?: ElevenLabsModel;\n apiKey?: string;\n}\n\ninterface SpeechToTextOptions {\n language_code?: string;\n tag_audio_events?: boolean;\n num_speakers?: number;\n filetype?: string;\n}\n\ninterface RequestOptions {\n timeoutInSeconds?: number;\n maxRetries?: number;\n abortSignal?: AbortSignal;\n apiKey?: string | undefined;\n headers?: Record<string, string>;\n}\n\n// Combined options type\ntype ElevenLabsListenOptions = SpeechToTextOptions & RequestOptions;\n\nexport class ElevenLabsVoice extends MastraVoice {\n private client: ElevenLabsClient;\n\n /**\n * Creates an instance of the ElevenLabsVoice class.\n *\n * @param {Object} options - The options for the voice configuration.\n * @param {ElevenLabsVoiceConfig} [options.speechModel] - The configuration for the speech model, including the model name and API key.\n * @param {string} [options.speaker] - The ID of the speaker to use. If not provided, a default speaker will be used.\n *\n * @throws {Error} If the ELEVENLABS_API_KEY is not set in the environment variables.\n */\n constructor({\n speechModel,\n listeningModel,\n speaker,\n }: { speechModel?: ElevenLabsVoiceConfig; listeningModel?: ElevenLabsVoiceConfig; speaker?: string } = {}) {\n const apiKey = speechModel?.apiKey ?? process.env.ELEVENLABS_API_KEY;\n super({\n speechModel: {\n name: speechModel?.name ?? 'eleven_multilingual_v2',\n apiKey: speechModel?.apiKey,\n },\n listeningModel: {\n name: listeningModel?.name ?? 'scribe_v1',\n apiKey: listeningModel?.apiKey,\n },\n speaker,\n });\n\n if (!apiKey) {\n throw new Error('ELEVENLABS_API_KEY is not set');\n }\n\n this.client = new ElevenLabsClient({\n apiKey,\n });\n\n this.speaker = speaker || '9BWtsMINqrJLrRacOk9x'; // Aria is the default speaker\n }\n\n /**\n * Retrieves a list of available speakers from the Eleven Labs API.\n * Each speaker includes their ID, name, language, and gender.\n *\n * @returns {Promise<Array<{ voiceId: string, name: string, language: string, gender: string }>>}\n * A promise that resolves to an array of speaker objects.\n */\n async getSpeakers() {\n const res = await this.traced(async () => {\n const voices = await this.client.voices.getAll();\n return (\n voices?.voices?.map(voice => ({\n voiceId: voice.voice_id,\n name: voice.name,\n language: voice.labels?.language || 'en',\n gender: voice.labels?.gender || 'neutral',\n })) ?? []\n );\n }, 'voice.elevenlabs.voices')();\n\n return res;\n }\n\n private async streamToString(stream: NodeJS.ReadableStream): Promise<string> {\n const chunks: Buffer[] = [];\n for await (const chunk of stream) {\n if (typeof chunk === 'string') {\n chunks.push(Buffer.from(chunk));\n } else {\n chunks.push(chunk);\n }\n }\n return Buffer.concat(chunks).toString('utf-8');\n }\n\n /**\n * Converts text or audio input into speech using the Eleven Labs API.\n *\n * @param {string | NodeJS.ReadableStream} input - The text to be converted to speech or a stream containing audio data.\n * @param {Object} [options] - Optional parameters for the speech generation.\n * @param {string} [options.speaker] - The ID of the speaker to use for the speech. 
If not provided, the default speaker will be used.\n *\n * @returns {Promise<NodeJS.ReadableStream>} A promise that resolves to a readable stream of the generated speech.\n *\n * @throws {Error} If no speaker is specified or if no speech model is set.\n */\n async speak(input: string | NodeJS.ReadableStream, options?: { speaker?: string }): Promise<NodeJS.ReadableStream> {\n const speaker = options?.speaker || this.speaker;\n if (!speaker) {\n throw new Error('No speaker specified');\n }\n\n if (!this.speechModel?.name) {\n throw new Error('No speech model specified');\n }\n const text = typeof input === 'string' ? input : await this.streamToString(input);\n const res = await this.traced(async () => {\n return await this.client.generate({\n text,\n voice: speaker,\n model_id: this.speechModel?.name as ElevenLabsModel,\n stream: true,\n });\n }, 'voice.elevenlabs.speak')();\n\n return res;\n }\n\n /**\n * Checks if listening capabilities are enabled.\n *\n * @returns {Promise<{ enabled: boolean }>}\n */\n async getListener() {\n return { enabled: true };\n }\n\n /**\n * Converts audio input to text using ElevenLabs Speech-to-Text API.\n *\n * @param input - A readable stream containing the audio data to transcribe\n * @param options - Configuration options for the transcription\n * @param options.language_code - ISO language code (e.g., 'en', 'fr', 'es')\n * @param options.tag_audio_events - Whether to tag audio events like [MUSIC], [LAUGHTER], etc.\n * @param options.num_speakers - Number of speakers to detect in the audio\n * @param options.filetype - Audio file format (e.g., 'mp3', 'wav', 'ogg')\n * @param options.timeoutInSeconds - Request timeout in seconds\n * @param options.maxRetries - Maximum number of retry attempts\n * @param options.abortSignal - Signal to abort the request\n *\n * @returns A Promise that resolves to the transcribed text\n *\n */\n async listen(input: NodeJS.ReadableStream, options?: ElevenLabsListenOptions): Promise<string> {\n const res = await this.traced(async () => {\n const chunks: Buffer[] = [];\n for await (const chunk of input) {\n if (typeof chunk === 'string') {\n chunks.push(Buffer.from(chunk));\n } else {\n chunks.push(chunk);\n }\n }\n const buffer = Buffer.concat(chunks);\n\n const { language_code, tag_audio_events, num_speakers, filetype, ...requestOptions } = options || {};\n\n const file = new File([buffer], `audio.${filetype || 'mp3'}`);\n\n const transcription = await this.client.speechToText.convert(\n {\n file: file,\n model_id: this.listeningModel?.name as ElevenLabsModel,\n language_code,\n tag_audio_events,\n num_speakers,\n },\n requestOptions,\n );\n\n return transcription.text;\n }, 'voice.elevenlabs.listen')();\n\n return res;\n }\n}\n"]}
+ {"version":3,"sources":["../src/index.ts"],"names":[],"mappings":";;;;AAkCO,IAAM,eAAA,GAAN,cAA8B,WAAA,CAAY;AAAA,EACvC,MAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWR,WAAA,CAAY;AAAA,IACV,WAAA;AAAA,IACA,cAAA;AAAA,IACA;AAAA,GACF,GAAuG,EAAC,EAAG;AACzG,IAAA,MAAM,MAAA,GAAS,WAAA,EAAa,MAAA,IAAU,OAAA,CAAQ,GAAA,CAAI,kBAAA;AAClD,IAAA,KAAA,CAAM;AAAA,MACJ,WAAA,EAAa;AAAA,QACX,IAAA,EAAM,aAAa,IAAA,IAAQ,wBAAA;AAAA,QAC3B,QAAQ,WAAA,EAAa;AAAA,OACvB;AAAA,MACA,cAAA,EAAgB;AAAA,QACd,IAAA,EAAM,gBAAgB,IAAA,IAAQ,WAAA;AAAA,QAC9B,QAAQ,cAAA,EAAgB;AAAA,OAC1B;AAAA,MACA;AAAA,KACD,CAAA;AAED,IAAA,IAAI,CAAC,MAAA,EAAQ;AACX,MAAA,MAAM,IAAI,MAAM,+BAA+B,CAAA;AAAA,IACjD;AAEA,IAAA,IAAA,CAAK,MAAA,GAAS,IAAI,gBAAA,CAAiB;AAAA,MACjC;AAAA,KACD,CAAA;AAED,IAAA,IAAA,CAAK,UAAU,OAAA,IAAW,sBAAA;AAAA,EAC5B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,MAAM,WAAA,GAAc;AAClB,IAAA,MAAM,MAAA,GAAS,MAAM,IAAA,CAAK,MAAA,CAAO,OAAO,MAAA,EAAO;AAC/C,IAAA,OACE,MAAA,EAAQ,MAAA,EAAQ,GAAA,CAAI,CAAA,KAAA,MAAU;AAAA,MAC5B,SAAS,KAAA,CAAM,QAAA;AAAA,MACf,MAAM,KAAA,CAAM,IAAA;AAAA,MACZ,QAAA,EAAU,KAAA,CAAM,MAAA,EAAQ,QAAA,IAAY,IAAA;AAAA,MACpC,MAAA,EAAQ,KAAA,CAAM,MAAA,EAAQ,MAAA,IAAU;AAAA,KAClC,CAAE,KAAK,EAAC;AAAA,EAEZ;AAAA,EAEA,MAAc,eAAe,MAAA,EAAgD;AAC3E,IAAA,MAAM,SAAmB,EAAC;AAC1B,IAAA,WAAA,MAAiB,SAAS,MAAA,EAAQ;AAChC,MAAA,IAAI,OAAO,UAAU,QAAA,EAAU;AAC7B,QAAA,MAAA,CAAO,IAAA,CAAK,MAAA,CAAO,IAAA,CAAK,KAAK,CAAC,CAAA;AAAA,MAChC,CAAA,MAAO;AACL,QAAA,MAAA,CAAO,KAAK,KAAK,CAAA;AAAA,MACnB;AAAA,IACF;AACA,IAAA,OAAO,MAAA,CAAO,MAAA,CAAO,MAAM,CAAA,CAAE,SAAS,OAAO,CAAA;AAAA,EAC/C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,MAAM,KAAA,CAAM,KAAA,EAAuC,OAAA,EAAgE;AACjH,IAAA,MAAM,OAAA,GAAU,OAAA,EAAS,OAAA,IAAW,IAAA,CAAK,OAAA;AACzC,IAAA,IAAI,CAAC,OAAA,EAAS;AACZ,MAAA,MAAM,IAAI,MAAM,sBAAsB,CAAA;AAAA,IACxC;AAEA,IAAA,IAAI,CAAC,IAAA,CAAK,WAAA,EAAa,IAAA,EAAM;AAC3B,MAAA,MAAM,IAAI,MAAM,2BAA2B,CAAA;AAAA,IAC7C;AACA,IAAA,MAAM,IAAA,GAAO,OAAO,KAAA,KAAU,QAAA,GAAW,QAAQ,MAAM,IAAA,CAAK,eAAe,KAAK,CAAA;AAEhF,IAAA,OAAO,MAAM,IAAA,CAAK,MAAA,CAAO,QAAA,CAAS;AAAA,MAChC,IAAA;AAAA,MACA,KAAA,EAAO,OAAA;AAAA,MACP,QAAA,EAAU,KAAK,WAAA,EAAa,IAAA;AAAA,MAC5B,MAAA,EAAQ;AAAA,KACT,CAAA;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAM,WAAA,GAAc;AAClB,IAAA,OAAO,EAAE,SAAS,IAAA,EAAK;AAAA,EACzB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAkBA,MAAM,MAAA,CAAO,KAAA,EAA8B,OAAA,EAAoD;AAC7F,IAAA,MAAM,SAAmB,EAAC;AAC1B,IAAA,WAAA,MAAiB,SAAS,KAAA,EAAO;AAC/B,MAAA,IAAI,OAAO,UAAU,QAAA,EAAU;AAC7B,QAAA,MAAA,CAAO,IAAA,CAAK,MAAA,CAAO,IAAA,CAAK,KAAK,CAAC,CAAA;AAAA,MAChC,CAAA,MAAO;AACL,QAAA,MAAA,CAAO,KAAK,KAAK,CAAA;AAAA,MACnB;AAAA,IACF;AACA,IAAA,MAAM,MAAA,GAAS,MAAA,CAAO,MAAA,CAAO,MAAM,CAAA;AAEnC,IAAA,MAAM,EAAE,eAAe,gBAAA,EAAkB,YAAA,EAAc,UAAU,GAAG,cAAA,EAAe,GAAI,OAAA,IAAW,EAAC;AAEnG,IAAA,MAAM,IAAA,GAAO,IAAI,IAAA,CAAK,CAAC,MAAM,CAAA,EAAG,CAAA,MAAA,EAAS,QAAA,IAAY,KAAK,CAAA,CAAE,CAAA;AAE5D,IAAA,MAAM,aAAA,GAAgB,MAAM,IAAA,CAAK,MAAA,CAAO,YAAA,CAAa,OAAA;AAAA,MACnD;AAAA,QACE,IAAA;AAAA,QACA,QAAA,EAAU,KAAK,cAAA,EAAgB,IAAA;AAAA,QAC/B,aAAA;AAAA,QACA,gBAAA;AAAA,QACA;AAAA,OACF;AAAA,MACA;AAAA,KACF;AAEA,IAAA,OAAO,aAAA,CAAc,IAAA;AAAA,EACvB;AACF","file":"index.js","sourcesContent":["import { MastraVoice } from '@mastra/core/voice';\nimport { ElevenLabsClient } from 'elevenlabs';\n\ntype ElevenLabsModel =\n | 'eleven_multilingual_v2'\n | 'eleven_flash_v2_5'\n | 'eleven_flash_v2'\n | 'eleven_multilingual_sts_v2'\n | 'eleven_english_sts_v2'\n | 'scribe_v1';\n\ninterface ElevenLabsVoiceConfig {\n name?: ElevenLabsModel;\n apiKey?: string;\n}\n\ninterface SpeechToTextOptions {\n language_code?: string;\n 
tag_audio_events?: boolean;\n num_speakers?: number;\n filetype?: string;\n}\n\ninterface RequestOptions {\n timeoutInSeconds?: number;\n maxRetries?: number;\n abortSignal?: AbortSignal;\n apiKey?: string | undefined;\n headers?: Record<string, string>;\n}\n\n// Combined options type\ntype ElevenLabsListenOptions = SpeechToTextOptions & RequestOptions;\n\nexport class ElevenLabsVoice extends MastraVoice {\n private client: ElevenLabsClient;\n\n /**\n * Creates an instance of the ElevenLabsVoice class.\n *\n * @param {Object} options - The options for the voice configuration.\n * @param {ElevenLabsVoiceConfig} [options.speechModel] - The configuration for the speech model, including the model name and API key.\n * @param {string} [options.speaker] - The ID of the speaker to use. If not provided, a default speaker will be used.\n *\n * @throws {Error} If the ELEVENLABS_API_KEY is not set in the environment variables.\n */\n constructor({\n speechModel,\n listeningModel,\n speaker,\n }: { speechModel?: ElevenLabsVoiceConfig; listeningModel?: ElevenLabsVoiceConfig; speaker?: string } = {}) {\n const apiKey = speechModel?.apiKey ?? process.env.ELEVENLABS_API_KEY;\n super({\n speechModel: {\n name: speechModel?.name ?? 'eleven_multilingual_v2',\n apiKey: speechModel?.apiKey,\n },\n listeningModel: {\n name: listeningModel?.name ?? 'scribe_v1',\n apiKey: listeningModel?.apiKey,\n },\n speaker,\n });\n\n if (!apiKey) {\n throw new Error('ELEVENLABS_API_KEY is not set');\n }\n\n this.client = new ElevenLabsClient({\n apiKey,\n });\n\n this.speaker = speaker || '9BWtsMINqrJLrRacOk9x'; // Aria is the default speaker\n }\n\n /**\n * Retrieves a list of available speakers from the Eleven Labs API.\n * Each speaker includes their ID, name, language, and gender.\n *\n * @returns {Promise<Array<{ voiceId: string, name: string, language: string, gender: string }>>}\n * A promise that resolves to an array of speaker objects.\n */\n async getSpeakers() {\n const voices = await this.client.voices.getAll();\n return (\n voices?.voices?.map(voice => ({\n voiceId: voice.voice_id,\n name: voice.name,\n language: voice.labels?.language || 'en',\n gender: voice.labels?.gender || 'neutral',\n })) ?? []\n );\n }\n\n private async streamToString(stream: NodeJS.ReadableStream): Promise<string> {\n const chunks: Buffer[] = [];\n for await (const chunk of stream) {\n if (typeof chunk === 'string') {\n chunks.push(Buffer.from(chunk));\n } else {\n chunks.push(chunk);\n }\n }\n return Buffer.concat(chunks).toString('utf-8');\n }\n\n /**\n * Converts text or audio input into speech using the Eleven Labs API.\n *\n * @param {string | NodeJS.ReadableStream} input - The text to be converted to speech or a stream containing audio data.\n * @param {Object} [options] - Optional parameters for the speech generation.\n * @param {string} [options.speaker] - The ID of the speaker to use for the speech. If not provided, the default speaker will be used.\n *\n * @returns {Promise<NodeJS.ReadableStream>} A promise that resolves to a readable stream of the generated speech.\n *\n * @throws {Error} If no speaker is specified or if no speech model is set.\n */\n async speak(input: string | NodeJS.ReadableStream, options?: { speaker?: string }): Promise<NodeJS.ReadableStream> {\n const speaker = options?.speaker || this.speaker;\n if (!speaker) {\n throw new Error('No speaker specified');\n }\n\n if (!this.speechModel?.name) {\n throw new Error('No speech model specified');\n }\n const text = typeof input === 'string' ? 
input : await this.streamToString(input);\n\n return await this.client.generate({\n text,\n voice: speaker,\n model_id: this.speechModel?.name as ElevenLabsModel,\n stream: true,\n });\n }\n\n /**\n * Checks if listening capabilities are enabled.\n *\n * @returns {Promise<{ enabled: boolean }>}\n */\n async getListener() {\n return { enabled: true };\n }\n\n /**\n * Converts audio input to text using ElevenLabs Speech-to-Text API.\n *\n * @param input - A readable stream containing the audio data to transcribe\n * @param options - Configuration options for the transcription\n * @param options.language_code - ISO language code (e.g., 'en', 'fr', 'es')\n * @param options.tag_audio_events - Whether to tag audio events like [MUSIC], [LAUGHTER], etc.\n * @param options.num_speakers - Number of speakers to detect in the audio\n * @param options.filetype - Audio file format (e.g., 'mp3', 'wav', 'ogg')\n * @param options.timeoutInSeconds - Request timeout in seconds\n * @param options.maxRetries - Maximum number of retry attempts\n * @param options.abortSignal - Signal to abort the request\n *\n * @returns A Promise that resolves to the transcribed text\n *\n */\n async listen(input: NodeJS.ReadableStream, options?: ElevenLabsListenOptions): Promise<string> {\n const chunks: Buffer[] = [];\n for await (const chunk of input) {\n if (typeof chunk === 'string') {\n chunks.push(Buffer.from(chunk));\n } else {\n chunks.push(chunk);\n }\n }\n const buffer = Buffer.concat(chunks);\n\n const { language_code, tag_audio_events, num_speakers, filetype, ...requestOptions } = options || {};\n\n const file = new File([buffer], `audio.${filetype || 'mp3'}`);\n\n const transcription = await this.client.speechToText.convert(\n {\n file: file,\n model_id: this.listeningModel?.name as ElevenLabsModel,\n language_code,\n tag_audio_events,\n num_speakers,\n },\n requestOptions,\n );\n\n return transcription.text;\n }\n}\n"]}
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@mastra/voice-elevenlabs",
- "version": "0.11.12",
+ "version": "0.12.0-beta.0",
  "description": "Mastra ElevenLabs voice integration",
  "type": "module",
  "files": [
@@ -33,12 +33,12 @@
  "tsup": "^8.5.0",
  "typescript": "^5.8.3",
  "vitest": "^3.2.4",
- "@internal/types-builder": "0.0.33",
- "@internal/lint": "0.0.58",
- "@mastra/core": "0.24.0"
+ "@internal/lint": "0.0.53",
+ "@mastra/core": "1.0.0-beta.0",
+ "@internal/types-builder": "0.0.28"
  },
  "peerDependencies": {
- "@mastra/core": ">=0.18.1-0 <0.25.0-0",
+ "@mastra/core": ">=1.0.0-0 <2.0.0-0",
  "zod": "^3.25.0 || ^4.0.0"
  },
  "homepage": "https://mastra.ai",
@@ -50,12 +50,8 @@
  "bugs": {
  "url": "https://github.com/mastra-ai/mastra/issues"
  },
- "publishConfig": {
- "access": "public",
- "publish-branch": [
- "main",
- "0.x"
- ]
+ "engines": {
+ "node": ">=22.13.0"
  },
  "scripts": {
  "build": "tsup --silent --config tsup.config.ts",