@videosdk.live/react-sdk 0.1.88 → 0.1.89

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -790,6 +790,7 @@ export function useMeeting({
790
790
  playbackHlsUrl: string;
791
791
  livestreamUrl: string;
792
792
  };
793
+ transcriptionState: string;
793
794
  selectedCameraDevice?: {
794
795
  deviceId: string;
795
796
  groupId: string;
@@ -987,6 +988,61 @@ export function useFile(): {
987
988
  }) => Promise<string | null>;
988
989
  };
989
990
 
991
+ /**
992
+ * @param onTranscriptionStateChanged - This will be triggered when the realtime transcription state changes.
993
+ * ---
994
+ * @param onTranscriptionText - This will be triggered when realtime transcription text is published.
995
+ * ---
996
+ * @returns This will return the `startTranscription()` and `stopTranscription()` methods. You can refer to this [API Reference](https://docs.videosdk.live/react/api/sdk-reference/use-transcription#returns)
997
+ * ---
998
+ * **useTranscription example**
999
+ * ```js
1000
+ *
1001
+ * function onTranscriptionStateChanged(data) {
1002
+ * console.log('New State Payload:', data)
1003
+ * }
1004
+ *
1005
+ * function onTranscriptionText(data) {
1006
+ * console.log('Transcription Text Payload:', data);
1007
+ * }
1008
+ *
1009
+ * const { startTranscription, stopTranscription } = useTranscription(topic, {
1010
+ * onTranscriptionStateChanged,
1011
+ * onTranscriptionText,
1012
+ * });
1013
+ *
1014
+ * async function handleStartTranscription() {
1015
+ * await startTranscription()
1016
+ * }
1017
+ *
1018
+ * async function handleStopTranscription() {
1019
+ * await stopTranscription()
1020
+ * }
1021
+ * ```
1022
+ */
1023
+ export function useTranscription({
1024
+ onTranscriptionStateChanged,
1025
+ onTranscriptionText
1026
+ }?: {
1027
+ onTranscriptionStateChanged?: (data: { id: string; status: string }) => void;
1028
+ onTranscriptionText?: (data: {
1029
+ participantId: string;
1030
+ participantName: string;
1031
+ text: string;
1032
+ timestamp: string;
1033
+ type: 'realtime';
1034
+ }) => void;
1035
+ }): {
1036
+ startTranscription: ({
1037
+ webhookUrl,
1038
+ modelConfig
1039
+ }: {
1040
+ webhookUrl?: string;
1041
+ modelConfig?: {};
1042
+ }) => void;
1043
+ stopTranscription: () => void;
1044
+ };
1045
+
990
1046
  /**
991
1047
  * @param microphoneId - It will be the id of the mic from which the audio should be captured.
992
1048
  * ---
@@ -1193,9 +1249,12 @@ export const Constants: {
1193
1249
  INVALID_LIVESTREAM_CONFIG: number;
1194
1250
  START_HLS_FAILED: number;
1195
1251
  STOP_HLS_FAILED: number;
1252
+ START_TRANSCRIPTION_FAILED: number;
1253
+ STOP_TRANSCRIPTION_FAILED: number;
1196
1254
  RECORDING_FAILED: number;
1197
1255
  LIVESTREAM_FAILED: number;
1198
1256
  HLS_FAILED: number;
1257
+ TRANSCRIPTION_FAILED: number;
1199
1258
  ERROR_GET_VIDEO_MEDIA: number;
1200
1259
  ERROR_GET_AUDIO_MEDIA: number;
1201
1260
  ERROR_GET_DISPLAY_MEDIA: number;
@@ -1241,6 +1300,12 @@ export const Constants: {
1241
1300
  HLS_STOPPING: string;
1242
1301
  HLS_STOPPED: string;
1243
1302
  };
1303
+ transcriptionEvents: {
1304
+ TRANSCRIPTION_STARTING: string;
1305
+ TRANSCRIPTION_STARTED: string;
1306
+ TRANSCRIPTION_STOPPING: string;
1307
+ TRANSCRIPTION_STOPPED: string;
1308
+ };
1244
1309
  modes: {
1245
1310
  CONFERENCE: string;
1246
1311
  VIEWER: string;