@elevenlabs/react 0.8.1 → 0.9.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,22 +1,22 @@
1
1
 
2
- > @elevenlabs/react@0.8.1 prebuild /home/runner/work/packages/packages/packages/react
2
+ > @elevenlabs/react@0.9.1 prebuild /home/runner/work/packages/packages/packages/react
3
3
  > npm run generate-version
4
4
 
5
5
 
6
- > @elevenlabs/react@0.8.1 generate-version
6
+ > @elevenlabs/react@0.9.1 generate-version
7
7
  > printf "// This file is auto-generated during build\nexport const PACKAGE_VERSION = \"%s\";\n" "$npm_package_version" > src/version.ts
8
8
 
9
9
 
10
- > @elevenlabs/react@0.8.1 build /home/runner/work/packages/packages/packages/react
10
+ > @elevenlabs/react@0.9.1 build /home/runner/work/packages/packages/packages/react
11
11
  > BROWSERSLIST_ENV=modern microbundle --jsx React.createElement --jsxFragment React.Fragment --jsxImportSource react src/index.ts
12
12
 
13
13
  No name was provided for external module '@elevenlabs/client' in output.globals – guessing 'client'
14
14
  Build "@elevenlabs/react" to dist:
15
- 2.02 kB: lib.cjs.gz
16
- 1.79 kB: lib.cjs.br
17
- 1.78 kB: lib.modern.js.gz
18
- 1.57 kB: lib.modern.js.br
19
- 2 kB: lib.module.js.gz
20
- 1.78 kB: lib.module.js.br
21
- 2.1 kB: lib.umd.js.gz
22
- 1.86 kB: lib.umd.js.br
15
+ 3.18 kB: lib.cjs.gz
16
+ 2.81 kB: lib.cjs.br
17
+ 2.87 kB: lib.modern.js.gz
18
+ 2.52 kB: lib.modern.js.br
19
+ 3.14 kB: lib.module.js.gz
20
+ 2.78 kB: lib.module.js.br
21
+ 3.24 kB: lib.umd.js.gz
22
+ 2.87 kB: lib.umd.js.br
@@ -1,4 +1,4 @@
1
1
 
2
- > @elevenlabs/react@0.8.1 generate-version /home/runner/work/packages/packages/packages/react
2
+ > @elevenlabs/react@0.9.1 generate-version /home/runner/work/packages/packages/packages/react
3
3
  > printf "// This file is auto-generated during build\nexport const PACKAGE_VERSION = \"%s\";\n" "$npm_package_version" > src/version.ts
4
4
 
package/README.md CHANGED
@@ -525,6 +525,499 @@ const { canSendFeedback } = useConversation();
525
525
  console.log(canSendFeedback); // boolean
526
526
  ```
527
527
 
528
+ ### useScribe
529
+
530
+ React hook for managing real-time speech-to-text transcription with ElevenLabs Scribe Realtime v2.
531
+
532
+ **Note:** Scribe Realtime v2 is currently in closed beta. For access, please [contact sales](https://elevenlabs.io/contact-sales).
533
+
534
+ #### Quick Start
535
+
536
+ ```tsx
537
+ import { useScribe } from "@elevenlabs/react";
538
+
539
+ function MyComponent() {
540
+ const scribe = useScribe({
541
+ modelId: "scribe_realtime_v2",
542
+ onPartialTranscript: (data) => {
543
+ console.log("Partial:", data.text);
544
+ },
545
+ onFinalTranscript: (data) => {
546
+ console.log("Final:", data.text);
547
+ },
548
+ });
549
+
550
+ const handleStart = async () => {
551
+ const token = await fetchTokenFromServer();
552
+ await scribe.connect({
553
+ token,
554
+ microphone: {
555
+ echoCancellation: true,
556
+ noiseSuppression: true,
557
+ },
558
+ });
559
+ };
560
+
561
+ return (
562
+ <div>
563
+ <button onClick={handleStart} disabled={scribe.isConnected}>
564
+ Start Recording
565
+ </button>
566
+ <button onClick={scribe.disconnect} disabled={!scribe.isConnected}>
567
+ Stop
568
+ </button>
569
+
570
+ {scribe.partialTranscript && <p>Live: {scribe.partialTranscript}</p>}
571
+
572
+ <div>
573
+ {scribe.finalTranscripts.map((t) => (
574
+ <p key={t.id}>{t.text}</p>
575
+ ))}
576
+ </div>
577
+ </div>
578
+ );
579
+ }
580
+ ```
581
+
582
+ #### Getting a Token
583
+
584
+ Scribe requires a single-use token for authentication. Create an API endpoint on your server:
585
+
586
+ ```js
587
+ // Node.js server
588
+ app.get("/scribe-token", yourAuthMiddleware, async (req, res) => {
589
+ const response = await fetch(
590
+ "https://api.elevenlabs.io/v1/single-use-token/realtime_scribe",
591
+ {
592
+ headers: {
593
+ "xi-api-key": process.env.ELEVENLABS_API_KEY,
594
+ },
595
+ }
596
+ );
597
+
598
+ const data = await response.json();
599
+ res.json({ token: data.token });
600
+ });
601
+ ```
602
+
603
+ **Warning:** Your ElevenLabs API key is sensitive; do not leak it to the client. Always generate the token on the server.
604
+
605
+ ```tsx
606
+ // Client
607
+ const fetchToken = async () => {
608
+ const response = await fetch("/scribe-token");
609
+ const { token } = await response.json();
610
+ return token;
611
+ };
612
+ ```
613
+
614
+ #### Hook Options
615
+
616
+ Configure the hook with default options and callbacks:
617
+
618
+ ```tsx
619
+ const scribe = useScribe({
620
+ // Connection options (can be overridden in connect())
621
+ token: "optional-default-token",
622
+ modelId: "scribe_realtime_v2",
623
+ baseUri: "wss://api.elevenlabs.io",
624
+
625
+ // VAD options
626
+ commitStrategy: CommitStrategy.AUTOMATIC,
627
+ vadSilenceThresholdSecs: 0.5,
628
+ vadThreshold: 0.5,
629
+ minSpeechDurationMs: 100,
630
+ minSilenceDurationMs: 500,
631
+ languageCode: "en",
632
+
633
+ // Microphone options (for automatic mode)
634
+ microphone: {
635
+ deviceId: "optional-device-id",
636
+ echoCancellation: true,
637
+ noiseSuppression: true,
638
+ autoGainControl: true,
639
+ },
640
+
641
+ // Manual audio options (for file transcription)
642
+ audioFormat: AudioFormat.PCM_16000,
643
+ sampleRate: 16000,
644
+
645
+ // Auto-connect on mount
646
+ autoConnect: false,
647
+
648
+ // Event callbacks
649
+ onSessionStarted: () => console.log("Session started"),
650
+ onPartialTranscript: (data) => console.log("Partial:", data.text),
651
+ onFinalTranscript: (data) => console.log("Final:", data.text),
652
+ onFinalTranscriptWithTimestamps: (data) => console.log("With timestamps:", data),
653
+ onError: (error) => console.error("Error:", error),
654
+ onAuthError: (data) => console.error("Auth error:", data.error),
655
+ onConnect: () => console.log("Connected"),
656
+ onDisconnect: () => console.log("Disconnected"),
657
+ });
658
+ ```
659
+
660
+ #### Microphone Mode
661
+
662
+ Stream audio directly from the user's microphone:
663
+
664
+ ```tsx
665
+ function MicrophoneTranscription() {
666
+ const scribe = useScribe({
667
+ modelId: "scribe_realtime_v2",
668
+ });
669
+
670
+ const startRecording = async () => {
671
+ const token = await fetchToken();
672
+ await scribe.connect({
673
+ token,
674
+ microphone: {
675
+ echoCancellation: true,
676
+ noiseSuppression: true,
677
+ autoGainControl: true,
678
+ },
679
+ });
680
+ };
681
+
682
+ return (
683
+ <div>
684
+ <button onClick={startRecording} disabled={scribe.isConnected}>
685
+ {scribe.status === "connecting" ? "Connecting..." : "Start"}
686
+ </button>
687
+ <button onClick={scribe.disconnect} disabled={!scribe.isConnected}>
688
+ Stop
689
+ </button>
690
+
691
+ {scribe.partialTranscript && (
692
+ <div>
693
+ <strong>Speaking:</strong> {scribe.partialTranscript}
694
+ </div>
695
+ )}
696
+
697
+ {scribe.finalTranscripts.map((transcript) => (
698
+ <div key={transcript.id}>{transcript.text}</div>
699
+ ))}
700
+ </div>
701
+ );
702
+ }
703
+ ```
704
+
705
+ #### Manual Audio Mode (File Transcription)
706
+
707
+ Transcribe pre-recorded audio files:
708
+
709
+ ```tsx
710
+ import { useScribe, AudioFormat } from "@elevenlabs/react";
711
+
712
+ function FileTranscription() {
713
+ const [file, setFile] = useState<File | null>(null);
714
+ const scribe = useScribe({
715
+ modelId: "scribe_realtime_v2",
716
+ audioFormat: AudioFormat.PCM_16000,
717
+ sampleRate: 16000,
718
+ });
719
+
720
+ const transcribeFile = async () => {
721
+ if (!file) return;
722
+
723
+ const token = await fetchToken();
724
+ await scribe.connect({ token });
725
+
726
+ // Decode audio file
727
+ const arrayBuffer = await file.arrayBuffer();
728
+ const audioContext = new AudioContext({ sampleRate: 16000 });
729
+ const audioBuffer = await audioContext.decodeAudioData(arrayBuffer);
730
+
731
+ // Convert to PCM16
732
+ const channelData = audioBuffer.getChannelData(0);
733
+ const pcmData = new Int16Array(channelData.length);
734
+
735
+ for (let i = 0; i < channelData.length; i++) {
736
+ const sample = Math.max(-1, Math.min(1, channelData[i]));
737
+ pcmData[i] = sample < 0 ? sample * 32768 : sample * 32767;
738
+ }
739
+
740
+ // Send in chunks
741
+ const chunkSize = 4096;
742
+ for (let offset = 0; offset < pcmData.length; offset += chunkSize) {
743
+ const chunk = pcmData.slice(offset, offset + chunkSize);
744
+ const bytes = new Uint8Array(chunk.buffer);
745
+ const base64 = btoa(String.fromCharCode(...bytes));
746
+
747
+ scribe.sendAudio(base64);
748
+ await new Promise((resolve) => setTimeout(resolve, 50));
749
+ }
750
+
751
+ // Commit transcription
752
+ scribe.commit();
753
+ };
754
+
755
+ return (
756
+ <div>
757
+ <input
758
+ type="file"
759
+ accept="audio/*"
760
+ onChange={(e) => setFile(e.target.files?.[0] || null)}
761
+ />
762
+ <button onClick={transcribeFile} disabled={!file || scribe.isConnected}>
763
+ Transcribe
764
+ </button>
765
+
766
+ {scribe.finalTranscripts.map((transcript) => (
767
+ <div key={transcript.id}>{transcript.text}</div>
768
+ ))}
769
+ </div>
770
+ );
771
+ }
772
+ ```
773
+
774
+ #### Hook Return Values
775
+
776
+ ##### State
777
+
778
+ - **status** - Current connection status: `"disconnected"`, `"connecting"`, `"connected"`, `"transcribing"`, or `"error"`
779
+ - **isConnected** - Boolean indicating if connected
780
+ - **isTranscribing** - Boolean indicating if actively transcribing
781
+ - **partialTranscript** - Current partial (interim) transcript
782
+ - **finalTranscripts** - Array of completed transcript segments
783
+ - **error** - Current error message, or null
784
+
785
+ ```tsx
786
+ const scribe = useScribe(/* options */);
787
+
788
+ console.log(scribe.status); // "connected"
789
+ console.log(scribe.isConnected); // true
790
+ console.log(scribe.partialTranscript); // "hello world"
791
+ console.log(scribe.finalTranscripts); // [{ id: "...", text: "...", timestamp: ..., isFinal: true }]
792
+ console.log(scribe.error); // null or error string
793
+ ```
794
+
795
+ ##### Methods
796
+
797
+ ###### connect(options?)
798
+
799
+ Connect to Scribe. Options provided here override hook defaults:
800
+
801
+ ```tsx
802
+ await scribe.connect({
803
+ token: "your-token", // Required
804
+ microphone: { /* ... */ }, // For microphone mode
805
+ // OR
806
+ audioFormat: AudioFormat.PCM_16000, // For manual mode
807
+ sampleRate: 16000,
808
+ });
809
+ ```
810
+
811
+ ###### disconnect()
812
+
813
+ Disconnect and clean up resources:
814
+
815
+ ```tsx
816
+ scribe.disconnect();
817
+ ```
818
+
819
+ ###### sendAudio(audioBase64, options?)
820
+
821
+ Send audio data (manual mode only):
822
+
823
+ ```tsx
824
+ scribe.sendAudio(base64AudioChunk, {
825
+ commit: false, // Optional: commit immediately
826
+ sampleRate: 16000, // Optional: override sample rate
827
+ });
828
+ ```
829
+
830
+ ###### commit()
831
+
832
+ Manually commit the current transcription:
833
+
834
+ ```tsx
835
+ scribe.commit();
836
+ ```
837
+
838
+ ###### clearTranscripts()
839
+
840
+ Clear all transcripts from state:
841
+
842
+ ```tsx
843
+ scribe.clearTranscripts();
844
+ ```
845
+
846
+ ###### getConnection()
847
+
848
+ Get the underlying connection instance:
849
+
850
+ ```tsx
851
+ const connection = scribe.getConnection();
852
+ // Returns RealtimeConnection | null
853
+ ```
854
+
855
+ #### Transcript Segment Type
856
+
857
+ Each final transcript segment has the following structure:
858
+
859
+ ```typescript
860
+ interface TranscriptSegment {
861
+ id: string; // Unique identifier
862
+ text: string; // Transcript text
863
+ timestamp: number; // Unix timestamp
864
+ isFinal: boolean; // Always true for final transcripts
865
+ }
866
+ ```
867
+
868
+ #### Event Callbacks
869
+
870
+ All event callbacks are optional and can be provided as hook options:
871
+
872
+ ```tsx
873
+ const scribe = useScribe({
874
+ onSessionStarted: () => {
875
+ console.log("Session started");
876
+ },
877
+ onPartialTranscript: (data: { text: string }) => {
878
+ console.log("Partial:", data.text);
879
+ },
880
+ onFinalTranscript: (data: { text: string }) => {
881
+ console.log("Final:", data.text);
882
+ },
883
+ onFinalTranscriptWithTimestamps: (data: {
884
+ text: string;
885
+ timestamps?: { start: number; end: number }[];
886
+ }) => {
887
+ console.log("Text:", data.text);
888
+ console.log("Word timestamps:", data.timestamps);
889
+ },
890
+ onError: (error: Error | Event) => {
891
+ console.error("Connection error:", error);
892
+ },
893
+ onAuthError: (data: { error: string }) => {
894
+ console.error("Auth error:", data.error);
895
+ },
896
+ onConnect: () => {
897
+ console.log("WebSocket opened");
898
+ },
899
+ onDisconnect: () => {
900
+ console.log("WebSocket closed");
901
+ },
902
+ });
903
+ ```
904
+
905
+ #### Commit Strategies
906
+
907
+ Control when transcriptions are finalized:
908
+
909
+ ```tsx
910
+ import { CommitStrategy } from "@elevenlabs/react";
911
+
912
+ // Automatic (default) - API detects speech end
913
+ const scribe = useScribe({
914
+ commitStrategy: CommitStrategy.AUTOMATIC,
915
+ });
916
+
917
+ // Manual - you control when to commit
918
+ const scribe = useScribe({
919
+ commitStrategy: CommitStrategy.MANUAL,
920
+ });
921
+
922
+ // Later...
923
+ scribe.commit(); // Finalize transcription
924
+ ```
925
+
926
+ #### Complete Example
927
+
928
+ ```tsx
929
+ import { useScribe, AudioFormat, CommitStrategy } from "@elevenlabs/react";
930
+ import { useState } from "react";
931
+
932
+ function ScribeDemo() {
933
+ const [mode, setMode] = useState<"microphone" | "file">("microphone");
934
+
935
+ const scribe = useScribe({
936
+ modelId: "scribe_realtime_v2",
937
+ commitStrategy: CommitStrategy.AUTOMATIC,
938
+ onSessionStarted: () => console.log("Started"),
939
+ onFinalTranscript: (data) => console.log("Final:", data.text),
940
+ onError: (error) => console.error("Error:", error),
941
+ });
942
+
943
+ const startMicrophone = async () => {
944
+ const token = await fetchToken();
945
+ await scribe.connect({
946
+ token,
947
+ microphone: {
948
+ echoCancellation: true,
949
+ noiseSuppression: true,
950
+ },
951
+ });
952
+ };
953
+
954
+ return (
955
+ <div>
956
+ <h1>Scribe Demo</h1>
957
+
958
+ {/* Status */}
959
+ <div>
960
+ Status: {scribe.status}
961
+ {scribe.error && <span>Error: {scribe.error}</span>}
962
+ </div>
963
+
964
+ {/* Controls */}
965
+ <div>
966
+ {!scribe.isConnected ? (
967
+ <button onClick={startMicrophone}>Start Recording</button>
968
+ ) : (
969
+ <button onClick={scribe.disconnect}>Stop</button>
970
+ )}
971
+ <button onClick={scribe.clearTranscripts}>Clear</button>
972
+ </div>
973
+
974
+ {/* Live Transcript */}
975
+ {scribe.partialTranscript && (
976
+ <div>
977
+ <strong>Live:</strong> {scribe.partialTranscript}
978
+ </div>
979
+ )}
980
+
981
+ {/* Final Transcripts */}
982
+ <div>
983
+ <h2>Transcripts ({scribe.finalTranscripts.length})</h2>
984
+ {scribe.finalTranscripts.map((t) => (
985
+ <div key={t.id}>
986
+ <span>{new Date(t.timestamp).toLocaleTimeString()}</span>
987
+ <p>{t.text}</p>
988
+ </div>
989
+ ))}
990
+ </div>
991
+ </div>
992
+ );
993
+ }
994
+ ```
995
+
996
+ #### TypeScript Support
997
+
998
+ Full TypeScript types are included:
999
+
1000
+ ```typescript
1001
+ import {
1002
+ useScribe,
1003
+ AudioFormat,
1004
+ CommitStrategy,
1005
+ RealtimeEvents,
1006
+ type UseScribeReturn,
1007
+ type ScribeHookOptions,
1008
+ type ScribeStatus,
1009
+ type TranscriptSegment,
1010
+ type RealtimeConnection,
1011
+ } from "@elevenlabs/react";
1012
+
1013
+ const scribe: UseScribeReturn = useScribe({
1014
+ modelId: "scribe_realtime_v2",
1015
+ microphone: {
1016
+ echoCancellation: true,
1017
+ },
1018
+ });
1019
+ ```
1020
+
528
1021
  ## CSP compliance
529
1022
 
530
1023
  If your application has a tight Content Security Policy and does not allow data: or blob: in the `script-src` (w3.org/TR/CSP2#source-list-guid-matching), you can self-host the needed files in the public folder.
package/dist/index.d.ts CHANGED
@@ -14,6 +14,8 @@ export declare function getOriginForLocation(location: Location): string;
14
14
  export declare function getLivekitUrlForLocation(location: Location): string;
15
15
  export type { Role, Mode, Status, SessionConfig, DisconnectionDetails, Language, VadScoreEvent, InputConfig, FormatConfig, VoiceConversation, TextConversation, Callbacks, } from "@elevenlabs/client";
16
16
  export { postOverallFeedback } from "@elevenlabs/client";
17
+ export { useScribe, AudioFormat, CommitStrategy, RealtimeEvents, } from "./scribe";
18
+ export type { ScribeStatus, TranscriptSegment, ScribeCallbacks, ScribeHookOptions, UseScribeReturn, RealtimeConnection, } from "./scribe";
17
19
  export type HookOptions = Partial<SessionConfig & HookCallbacks & ClientToolsConfig & InputConfig & OutputConfig & AudioWorkletConfig & FormatConfig & {
18
20
  serverLocation?: Location | string;
19
21
  }>;
package/dist/lib.cjs CHANGED
@@ -1,2 +1,2 @@
1
- var n=require("react"),e=require("@elevenlabs/client");function t(){return t=Object.assign?Object.assign.bind():function(n){for(var e=1;e<arguments.length;e++){var t=arguments[e];for(var r in t)({}).hasOwnProperty.call(t,r)&&(n[r]=t[r])}return n},t.apply(null,arguments)}var r=["micMuted","volume","serverLocation"];function o(n){switch(void 0===n&&(n="us"),n){case"eu-residency":case"in-residency":case"us":case"global":return n;default:return console.warn("[ConversationalAI] Invalid server-location: "+n+'. Defaulting to "us"'),"us"}}function u(n){return{us:"wss://api.elevenlabs.io","eu-residency":"wss://api.eu.residency.elevenlabs.io","in-residency":"wss://api.in.residency.elevenlabs.io",global:"wss://api.elevenlabs.io"}[n]}function l(n){return{us:"wss://livekit.rtc.elevenlabs.io","eu-residency":"wss://livekit.rtc.eu.residency.elevenlabs.io","in-residency":"wss://livekit.rtc.in.residency.elevenlabs.io",global:"wss://livekit.rtc.elevenlabs.io"}[n]}Object.defineProperty(exports,"postOverallFeedback",{enumerable:!0,get:function(){return e.postOverallFeedback}}),exports.getLivekitUrlForLocation=l,exports.getOriginForLocation=u,exports.parseLocation=o,exports.useConversation=function(i){void 0===i&&(i={});var s=i.micMuted,c=i.volume,a=i.serverLocation,v=function(n,e){if(null==n)return{};var t={};for(var r in n)if({}.hasOwnProperty.call(n,r)){if(-1!==e.indexOf(r))continue;t[r]=n[r]}return t}(i,r),d=n.useRef(null),f=n.useRef(null),g=n.useState("disconnected"),p=g[0],h=g[1],C=n.useState(!1),b=C[0],m=C[1],y=n.useState("listening"),M=y[0],P=y[1],S=n.useRef(s),I=n.useRef(c);return S.current=s,I.current=c,n.useEffect(function(){var n;void 0!==s&&(null==d||null==(n=d.current)||n.setMicMuted(s))},[s]),n.useEffect(function(){var n;void 0!==c&&(null==d||null==(n=d.current)||n.setVolume({volume:c}))},[c]),n.useEffect(function(){return function(){var n;null==(n=d.current)||n.endSession()}},[]),{startSession:function(n){try{var r,i,s=function(r){return i?r:function(r,i){try{var 
s=(O=u(A=o((null==n?void 0:n.serverLocation)||a)),F=l(A),f.current=e.Conversation.startSession(t({},null!=v?v:{},null!=n?n:{},{origin:O,livekitUrl:F,overrides:t({},null!=(c=null==v?void 0:v.overrides)?c:{},null!=(g=null==n?void 0:n.overrides)?g:{},{client:t({},null!=(p=null==v||null==(C=v.overrides)?void 0:C.client)?p:{},null!=(b=null==n||null==(y=n.overrides)?void 0:y.client)?b:{},{source:(null==n||null==(M=n.overrides)||null==(M=M.client)?void 0:M.source)||(null==v||null==(k=v.overrides)||null==(k=k.client)?void 0:k.source)||"react_sdk",version:(null==n||null==(w=n.overrides)||null==(w=w.client)?void 0:w.version)||(null==v||null==(D=v.overrides)||null==(D=D.client)?void 0:D.version)||"0.8.1"})}),onConnect:(null==n?void 0:n.onConnect)||(null==v?void 0:v.onConnect),onDisconnect:(null==n?void 0:n.onDisconnect)||(null==v?void 0:v.onDisconnect),onError:(null==n?void 0:n.onError)||(null==v?void 0:v.onError),onMessage:(null==n?void 0:n.onMessage)||(null==v?void 0:v.onMessage),onAudio:(null==n?void 0:n.onAudio)||(null==v?void 0:v.onAudio),onDebug:(null==n?void 0:n.onDebug)||(null==v?void 0:v.onDebug),onUnhandledClientToolCall:(null==n?void 0:n.onUnhandledClientToolCall)||(null==v?void 0:v.onUnhandledClientToolCall),onVadScore:(null==n?void 0:n.onVadScore)||(null==v?void 0:v.onVadScore),onInterruption:(null==n?void 0:n.onInterruption)||(null==v?void 0:v.onInterruption),onAgentToolResponse:(null==n?void 0:n.onAgentToolResponse)||(null==v?void 0:v.onAgentToolResponse),onConversationMetadata:(null==n?void 0:n.onConversationMetadata)||(null==v?void 0:v.onConversationMetadata),onMCPToolCall:(null==n?void 0:n.onMCPToolCall)||(null==v?void 0:v.onMCPToolCall),onMCPConnectionStatus:(null==n?void 0:n.onMCPConnectionStatus)||(null==v?void 0:v.onMCPConnectionStatus),onAsrInitiationMetadata:(null==n?void 0:n.onAsrInitiationMetadata)||(null==v?void 0:v.onAsrInitiationMetadata),onAgentChatResponsePart:(null==n?void 0:n.onAgentChatResponsePart)||(null==v?void 
0:v.onAgentChatResponsePart),onModeChange:function(e){var t,r=e.mode;P(r),null==(t=(null==n?void 0:n.onModeChange)||(null==v?void 0:v.onModeChange))||t({mode:r})},onStatusChange:function(e){var t,r=e.status;h(r),null==(t=(null==n?void 0:n.onStatusChange)||(null==v?void 0:v.onStatusChange))||t({status:r})},onCanSendFeedbackChange:function(e){var t,r=e.canSendFeedback;m(r),null==(t=(null==n?void 0:n.onCanSendFeedbackChange)||(null==v?void 0:v.onCanSendFeedbackChange))||t({canSendFeedback:r})}})),Promise.resolve(f.current).then(function(n){return d.current=n,void 0!==S.current&&d.current.setMicMuted(S.current),void 0!==I.current&&d.current.setVolume({volume:I.current}),d.current.getId()}))}catch(n){return i(!0,n)}var c,g,p,C,b,y,M,k,w,D,A,O,F;return s&&s.then?s.then(i.bind(null,!1),i.bind(null,!0)):i(!1,s)}(0,function(n,e){if(f.current=null,n)throw e;return e})};if(null!=(r=d.current)&&r.isOpen())return Promise.resolve(d.current.getId());var c=function(){if(f.current)return Promise.resolve(f.current).then(function(n){var e=n.getId();return i=1,e})}();return Promise.resolve(c&&c.then?c.then(s):s(c))}catch(n){return Promise.reject(n)}},endSession:function(){try{var n=d.current;return d.current=null,Promise.resolve(null==n?void 0:n.endSession()).then(function(){})}catch(n){return Promise.reject(n)}},setVolume:function(n){var e;null==(e=d.current)||e.setVolume({volume:n.volume})},getInputByteFrequencyData:function(){var n;return null==(n=d.current)?void 0:n.getInputByteFrequencyData()},getOutputByteFrequencyData:function(){var n;return null==(n=d.current)?void 0:n.getOutputByteFrequencyData()},getInputVolume:function(){var n,e;return null!=(n=null==(e=d.current)?void 0:e.getInputVolume())?n:0},getOutputVolume:function(){var n,e;return null!=(n=null==(e=d.current)?void 0:e.getOutputVolume())?n:0},sendFeedback:function(n){var e;null==(e=d.current)||e.sendFeedback(n)},getId:function(){var n;return null==(n=d.current)?void 0:n.getId()},sendContextualUpdate:function(n){var 
e;null==(e=d.current)||e.sendContextualUpdate(n)},sendUserMessage:function(n){var e;null==(e=d.current)||e.sendUserMessage(n)},sendUserActivity:function(){var n;null==(n=d.current)||n.sendUserActivity()},sendMCPToolApprovalResult:function(n,e){var t;null==(t=d.current)||t.sendMCPToolApprovalResult(n,e)},changeInputDevice:function(n){try{var e,t=function(n){if(e)return n;throw new Error("Device switching is only available for voice conversations")},r=function(){if(d.current&&"changeInputDevice"in d.current)return Promise.resolve(d.current.changeInputDevice(n)).then(function(n){return e=1,n})}();return Promise.resolve(r&&r.then?r.then(t):t(r))}catch(n){return Promise.reject(n)}},changeOutputDevice:function(n){try{var e,t=function(n){if(e)return n;throw new Error("Device switching is only available for voice conversations")},r=function(){if(d.current&&"changeOutputDevice"in d.current)return Promise.resolve(d.current.changeOutputDevice(n)).then(function(n){return e=1,n})}();return Promise.resolve(r&&r.then?r.then(t):t(r))}catch(n){return Promise.reject(n)}},status:p,canSendFeedback:b,micMuted:s,isSpeaking:"speaking"===M}};
1
+ var e=require("react"),n=require("@elevenlabs/client");function t(){return t=Object.assign?Object.assign.bind():function(e){for(var n=1;n<arguments.length;n++){var t=arguments[n];for(var r in t)({}).hasOwnProperty.call(t,r)&&(e[r]=t[r])}return e},t.apply(null,arguments)}var r=["micMuted","volume","serverLocation"];function o(e){switch(void 0===e&&(e="us"),e){case"eu-residency":case"in-residency":case"us":case"global":return e;default:return console.warn("[ConversationalAI] Invalid server-location: "+e+'. Defaulting to "us"'),"us"}}function i(e){return{us:"wss://api.elevenlabs.io","eu-residency":"wss://api.eu.residency.elevenlabs.io","in-residency":"wss://api.in.residency.elevenlabs.io",global:"wss://api.elevenlabs.io"}[e]}function u(e){return{us:"wss://livekit.rtc.elevenlabs.io","eu-residency":"wss://livekit.rtc.eu.residency.elevenlabs.io","in-residency":"wss://livekit.rtc.in.residency.elevenlabs.io",global:"wss://livekit.rtc.elevenlabs.io"}[e]}Object.defineProperty(exports,"AudioFormat",{enumerable:!0,get:function(){return n.AudioFormat}}),Object.defineProperty(exports,"CommitStrategy",{enumerable:!0,get:function(){return n.CommitStrategy}}),Object.defineProperty(exports,"RealtimeEvents",{enumerable:!0,get:function(){return n.RealtimeEvents}}),Object.defineProperty(exports,"postOverallFeedback",{enumerable:!0,get:function(){return n.postOverallFeedback}}),exports.getLivekitUrlForLocation=u,exports.getOriginForLocation=i,exports.parseLocation=o,exports.useConversation=function(l){void 0===l&&(l={});var a=l.micMuted,c=l.volume,s=l.serverLocation,d=function(e,n){if(null==e)return{};var t={};for(var r in e)if({}.hasOwnProperty.call(e,r)){if(-1!==n.indexOf(r))continue;t[r]=e[r]}return t}(l,r),v=e.useRef(null),f=e.useRef(null),m=e.useState("disconnected"),h=m[0],g=m[1],p=e.useState(!1),S=p[0],C=p[1],b=e.useState("listening"),y=b[0],M=b[1],T=e.useRef(a),R=e.useRef(c);return T.current=a,R.current=c,e.useEffect(function(){var e;void 
0!==a&&(null==v||null==(e=v.current)||e.setMicMuted(a))},[a]),e.useEffect(function(){var e;void 0!==c&&(null==v||null==(e=v.current)||e.setVolume({volume:c}))},[c]),e.useEffect(function(){return function(){var e;null==(e=v.current)||e.endSession()}},[]),{startSession:function(e){try{var r,l,a=function(r){return l?r:function(r,l){try{var a=(I=i(D=o((null==e?void 0:e.serverLocation)||s)),A=u(D),f.current=n.Conversation.startSession(t({},null!=d?d:{},null!=e?e:{},{origin:I,livekitUrl:A,overrides:t({},null!=(c=null==d?void 0:d.overrides)?c:{},null!=(m=null==e?void 0:e.overrides)?m:{},{client:t({},null!=(h=null==d||null==(p=d.overrides)?void 0:p.client)?h:{},null!=(S=null==e||null==(b=e.overrides)?void 0:b.client)?S:{},{source:(null==e||null==(y=e.overrides)||null==(y=y.client)?void 0:y.source)||(null==d||null==(P=d.overrides)||null==(P=P.client)?void 0:P.source)||"react_sdk",version:(null==e||null==(E=e.overrides)||null==(E=E.client)?void 0:E.version)||(null==d||null==(w=d.overrides)||null==(w=w.client)?void 0:w.version)||"0.9.1"})}),onConnect:(null==e?void 0:e.onConnect)||(null==d?void 0:d.onConnect),onDisconnect:(null==e?void 0:e.onDisconnect)||(null==d?void 0:d.onDisconnect),onError:(null==e?void 0:e.onError)||(null==d?void 0:d.onError),onMessage:(null==e?void 0:e.onMessage)||(null==d?void 0:d.onMessage),onAudio:(null==e?void 0:e.onAudio)||(null==d?void 0:d.onAudio),onDebug:(null==e?void 0:e.onDebug)||(null==d?void 0:d.onDebug),onUnhandledClientToolCall:(null==e?void 0:e.onUnhandledClientToolCall)||(null==d?void 0:d.onUnhandledClientToolCall),onVadScore:(null==e?void 0:e.onVadScore)||(null==d?void 0:d.onVadScore),onInterruption:(null==e?void 0:e.onInterruption)||(null==d?void 0:d.onInterruption),onAgentToolResponse:(null==e?void 0:e.onAgentToolResponse)||(null==d?void 0:d.onAgentToolResponse),onConversationMetadata:(null==e?void 0:e.onConversationMetadata)||(null==d?void 0:d.onConversationMetadata),onMCPToolCall:(null==e?void 0:e.onMCPToolCall)||(null==d?void 
0:d.onMCPToolCall),onMCPConnectionStatus:(null==e?void 0:e.onMCPConnectionStatus)||(null==d?void 0:d.onMCPConnectionStatus),onAsrInitiationMetadata:(null==e?void 0:e.onAsrInitiationMetadata)||(null==d?void 0:d.onAsrInitiationMetadata),onAgentChatResponsePart:(null==e?void 0:e.onAgentChatResponsePart)||(null==d?void 0:d.onAgentChatResponsePart),onModeChange:function(n){var t,r=n.mode;M(r),null==(t=(null==e?void 0:e.onModeChange)||(null==d?void 0:d.onModeChange))||t({mode:r})},onStatusChange:function(n){var t,r=n.status;g(r),null==(t=(null==e?void 0:e.onStatusChange)||(null==d?void 0:d.onStatusChange))||t({status:r})},onCanSendFeedbackChange:function(n){var t,r=n.canSendFeedback;C(r),null==(t=(null==e?void 0:e.onCanSendFeedbackChange)||(null==d?void 0:d.onCanSendFeedbackChange))||t({canSendFeedback:r})}})),Promise.resolve(f.current).then(function(e){return v.current=e,void 0!==T.current&&v.current.setMicMuted(T.current),void 0!==R.current&&v.current.setVolume({volume:R.current}),v.current.getId()}))}catch(e){return l(!0,e)}var c,m,h,p,S,b,y,P,E,w,D,I,A;return a&&a.then?a.then(l.bind(null,!1),l.bind(null,!0)):l(!1,a)}(0,function(e,n){if(f.current=null,e)throw n;return n})};if(null!=(r=v.current)&&r.isOpen())return Promise.resolve(v.current.getId());var c=function(){if(f.current)return Promise.resolve(f.current).then(function(e){var n=e.getId();return l=1,n})}();return Promise.resolve(c&&c.then?c.then(a):a(c))}catch(e){return Promise.reject(e)}},endSession:function(){try{var e=v.current;return v.current=null,Promise.resolve(null==e?void 0:e.endSession()).then(function(){})}catch(e){return Promise.reject(e)}},setVolume:function(e){var n;null==(n=v.current)||n.setVolume({volume:e.volume})},getInputByteFrequencyData:function(){var e;return null==(e=v.current)?void 0:e.getInputByteFrequencyData()},getOutputByteFrequencyData:function(){var e;return null==(e=v.current)?void 0:e.getOutputByteFrequencyData()},getInputVolume:function(){var e,n;return 
null!=(e=null==(n=v.current)?void 0:n.getInputVolume())?e:0},getOutputVolume:function(){var e,n;return null!=(e=null==(n=v.current)?void 0:n.getOutputVolume())?e:0},sendFeedback:function(e){var n;null==(n=v.current)||n.sendFeedback(e)},getId:function(){var e;return null==(e=v.current)?void 0:e.getId()},sendContextualUpdate:function(e){var n;null==(n=v.current)||n.sendContextualUpdate(e)},sendUserMessage:function(e){var n;null==(n=v.current)||n.sendUserMessage(e)},sendUserActivity:function(){var e;null==(e=v.current)||e.sendUserActivity()},sendMCPToolApprovalResult:function(e,n){var t;null==(t=v.current)||t.sendMCPToolApprovalResult(e,n)},changeInputDevice:function(e){try{var n,t=function(e){if(n)return e;throw new Error("Device switching is only available for voice conversations")},r=function(){if(v.current&&"changeInputDevice"in v.current)return Promise.resolve(v.current.changeInputDevice(e)).then(function(e){return n=1,e})}();return Promise.resolve(r&&r.then?r.then(t):t(r))}catch(e){return Promise.reject(e)}},changeOutputDevice:function(e){try{var n,t=function(e){if(n)return e;throw new Error("Device switching is only available for voice conversations")},r=function(){if(v.current&&"changeOutputDevice"in v.current)return Promise.resolve(v.current.changeOutputDevice(e)).then(function(e){return n=1,e})}();return Promise.resolve(r&&r.then?r.then(t):t(r))}catch(e){return Promise.reject(e)}},status:h,canSendFeedback:S,micMuted:a,isSpeaking:"speaking"===y}},exports.useScribe=function(r){void 0===r&&(r={});var o=r.onSessionStarted,i=r.onPartialTranscript,u=r.onFinalTranscript,l=r.onFinalTranscriptWithTimestamps,a=r.onError,c=r.onAuthError,s=r.onConnect,d=r.onDisconnect,v=r.token,f=r.modelId,m=r.baseUri,h=r.commitStrategy,g=r.vadSilenceThresholdSecs,p=r.vadThreshold,S=r.minSpeechDurationMs,C=r.minSilenceDurationMs,b=r.languageCode,y=r.microphone,M=r.audioFormat,T=r.sampleRate,R=r.autoConnect,P=void 
0!==R&&R,E=e.useRef(null),w=e.useState("disconnected"),D=w[0],I=w[1],A=e.useState(""),k=A[0],F=A[1],O=e.useState([]),x=O[0],U=O[1],j=e.useState(null),L=j[0],V=j[1];e.useEffect(function(){return function(){var e;null==(e=E.current)||e.close()}},[]);var N=e.useCallback(function(e){void 0===e&&(e={});try{if(E.current)return console.warn("Already connected"),Promise.resolve();try{I("connecting"),V(null);var t=e.token||v,r=e.modelId||f;if(!t)throw new Error("Token is required");if(!r)throw new Error("Model ID is required");var R,P=e.microphone||y,w=e.audioFormat||M,D=e.sampleRate||T;if(P)R=n.Scribe.connect({token:t,modelId:r,baseUri:e.baseUri||m,commitStrategy:e.commitStrategy||h,vadSilenceThresholdSecs:e.vadSilenceThresholdSecs||g,vadThreshold:e.vadThreshold||p,minSpeechDurationMs:e.minSpeechDurationMs||S,minSilenceDurationMs:e.minSilenceDurationMs||C,languageCode:e.languageCode||b,microphone:P});else{if(!w||!D)throw new Error("Either microphone options or (audioFormat + sampleRate) must be provided");R=n.Scribe.connect({token:t,modelId:r,baseUri:e.baseUri||m,commitStrategy:e.commitStrategy||h,vadSilenceThresholdSecs:e.vadSilenceThresholdSecs||g,vadThreshold:e.vadThreshold||p,minSpeechDurationMs:e.minSpeechDurationMs||S,minSilenceDurationMs:e.minSilenceDurationMs||C,languageCode:e.languageCode||b,audioFormat:w,sampleRate:D})}E.current=R,R.on(n.RealtimeEvents.SESSION_STARTED,function(){I("connected"),null==o||o()}),R.on(n.RealtimeEvents.PARTIAL_TRANSCRIPT,function(e){var n=e;F(n.text),I("transcribing"),null==i||i(n)}),R.on(n.RealtimeEvents.FINAL_TRANSCRIPT,function(e){var n=e,t={id:Date.now()+"-"+Math.random(),text:n.text,timestamp:Date.now(),isFinal:!0};U(function(e){return[].concat(e,[t])}),F(""),null==u||u(n)}),R.on(n.RealtimeEvents.FINAL_TRANSCRIPT_WITH_TIMESTAMPS,function(e){var n=e,t={id:Date.now()+"-"+Math.random(),text:n.text,timestamp:Date.now(),isFinal:!0};U(function(e){return[].concat(e,[t])}),F(""),null==l||l(n)}),R.on(n.RealtimeEvents.ERROR,function(e){var 
n=e;V(n.error),I("error"),null==a||a(new Error(n.error))}),R.on(n.RealtimeEvents.AUTH_ERROR,function(e){var n=e;V(n.error),I("error"),null==c||c(n)}),R.on(n.RealtimeEvents.OPEN,function(){null==s||s()}),R.on(n.RealtimeEvents.CLOSE,function(){I("disconnected"),E.current=null,null==d||d()})}catch(e){var A=e instanceof Error?e.message:"Failed to connect";throw V(A),I("error"),e}return Promise.resolve()}catch(e){return Promise.reject(e)}},[v,f,m,h,g,p,S,C,b,y,M,T,o,i,u,l,a,c,s,d]),q=e.useCallback(function(){var e;null==(e=E.current)||e.close(),E.current=null,I("disconnected")},[]),_=e.useCallback(function(e,n){if(!E.current)throw new Error("Not connected to Scribe");E.current.send(t({audioBase64:e},n))},[]),B=e.useCallback(function(){if(!E.current)throw new Error("Not connected to Scribe");E.current.commit()},[]),H=e.useCallback(function(){U([]),F("")},[]),W=e.useCallback(function(){return E.current},[]);return e.useEffect(function(){P&&N()},[P,N]),{status:D,isConnected:"connected"===D||"transcribing"===D,isTranscribing:"transcribing"===D,partialTranscript:k,finalTranscripts:x,error:L,connect:N,disconnect:q,sendAudio:_,commit:B,clearTranscripts:H,getConnection:W}};
2
2
  //# sourceMappingURL=lib.cjs.map