@unith-ai/react-native 0.0.4 → 0.0.6
This diff shows the published contents of two public package versions as they appear in their registry. It is provided for informational purposes only.
- package/dist/index.d.ts.map +1 -1
- package/dist/lib.js +1 -1
- package/dist/lib.js.map +1 -1
- package/dist/lib.modern.mjs +1 -1
- package/dist/lib.modern.mjs.map +1 -1
- package/dist/lib.module.js +1 -1
- package/dist/lib.module.js.map +1 -1
- package/dist/lib.umd.js +1 -1
- package/dist/lib.umd.js.map +1 -1
- package/package.json +1 -1
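For orientation, the public API surface visible in the type declarations and sourcemaps below is a useConversation hook plus a UnithConversationView component that wraps react-native-webview. A minimal usage sketch, assuming react-native-webview is installed as a peer dependency; the screen name and all credential values are placeholders, not real identifiers:

import React from "react";
import { UnithConversationView } from "@unith-ai/react-native";

// orgId / headId / apiKey are placeholders; real values come from your Unith account.
export function DigitalHumanScreen() {
  return (
    <UnithConversationView
      options={{ orgId: "YOUR_ORG_ID", headId: "YOUR_HEAD_ID", apiKey: "YOUR_API_KEY" }}
      style={{ flex: 1 }}
      // Event props mirror the ConversationEvents type in the diff below.
      onConnect={({ userId }) => console.log("connected as", userId)}
      onError={({ message }) => console.warn(message)}
    />
  );
}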
package/dist/index.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.tsx"],"names":[],"mappings":"AAAA,OAAO,KAAiD,MAAM,OAAO,CAAC;AACtE,OAAO,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAGzC,MAAM,MAAM,mBAAmB,GAAG;IAChC,KAAK,EAAE,MAAM,CAAC;IACd,MAAM,EAAE,MAAM,CAAC;IACf,MAAM,EAAE,MAAM,CAAC;IACf,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,aAAa,CAAC,EAAE,OAAO,CAAC;IACxB,mBAAmB,CAAC,EAAE,MAAM,CAAC;IAC7B,kBAAkB,CAAC,EAAE,OAAO,GAAG,aAAa,GAAG,QAAQ,CAAC;CACzD,CAAC;AAEF,MAAM,MAAM,kBAAkB,GAAG;IAC/B,cAAc,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,MAAM,EAAE,MAAM,CAAA;KAAE,KAAK,IAAI,CAAC;IACpD,SAAS,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,MAAM,EAAE,MAAM,CAAC;QAAC,QAAQ,EAAE,GAAG,CAAC;QAAC,gBAAgB,EAAE,OAAO,CAAA;KAAE,KAAK,IAAI,CAAC;IACzF,YAAY,CAAC,EAAE,CAAC,IAAI,EAAE,GAAG,KAAK,IAAI,CAAC;IACnC,SAAS,CAAC,EAAE,CAAC,IAAI,EAAE,GAAG,KAAK,IAAI,CAAC;IAChC,aAAa,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,WAAW,EAAE,MAAM,EAAE,CAAA;KAAE,KAAK,IAAI,CAAC;IAC1D,gBAAgB,CAAC,EAAE,MAAM,IAAI,CAAC;IAC9B,SAAS,CAAC,EAAE,MAAM,IAAI,CAAC;IACvB,kBAAkB,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,OAAO,EAAE,OAAO,CAAA;KAAE,KAAK,IAAI,CAAC;IAC1D,eAAe,CAAC,EAAE,MAAM,IAAI,CAAC;IAC7B,aAAa,CAAC,EAAE,MAAM,IAAI,CAAC;IAC3B,aAAa,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,OAAO,EAAE,OAAO,CAAA;KAAE,KAAK,IAAI,CAAC;IACrD,OAAO,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,OAAO,EAAE,MAAM,CAAC;QAAC,eAAe,EAAE,OAAO,CAAC;QAAC,IAAI,EAAE,MAAM,CAAA;KAAE,KAAK,IAAI,CAAC;IACtF,iBAAiB,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,OAAO,EAAE,MAAM,CAAA;KAAE,KAAK,IAAI,CAAC;IACxD,wBAAwB,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,MAAM,EAAE,IAAI,GAAG,KAAK,GAAG,YAAY,CAAA;KAAE,KAAK,IAAI,CAAC;IACnF,mCAAmC,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,UAAU,EAAE,MAAM,CAAA;KAAE,KAAK,IAAI,CAAC;CAC9E,CAAC;AAEF,MAAM,MAAM,aAAa,GAAG;IAC1B,YAAY,CAAC,EAAE,MAAM,CAAC;CACvB,CAAC;AAEF,MAAM,MAAM,qBAAqB,GAAG;IAClC,UAAU,EAAE,KAAK,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC;IACjC,YAAY,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;IAClC,WAAW,EAAE,OAAO,CAAC;IACrB,YAAY,EAAE,MAAM,IAAI,CAAC;IACzB,UAAU,EAAE,MAAM,IAAI,CAAC;IACvB,WAAW,EAAE,CAAC,IAAI,EAAE,MAAM,KAAK,IAAI,CAAC;IACpC,gBAAgB,EAAE,MAAM,IAAI,CAAC;IAC7B,UAAU,EAAE,MAAM,IAAI,CAAC;IACvB,WAAW,EAAE,MAAM,IAAI,CAAC;CACzB,CAAC;
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.tsx"],"names":[],"mappings":"AAAA,OAAO,KAAiD,MAAM,OAAO,CAAC;AACtE,OAAO,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAGzC,MAAM,MAAM,mBAAmB,GAAG;IAChC,KAAK,EAAE,MAAM,CAAC;IACd,MAAM,EAAE,MAAM,CAAC;IACf,MAAM,EAAE,MAAM,CAAC;IACf,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,aAAa,CAAC,EAAE,OAAO,CAAC;IACxB,mBAAmB,CAAC,EAAE,MAAM,CAAC;IAC7B,kBAAkB,CAAC,EAAE,OAAO,GAAG,aAAa,GAAG,QAAQ,CAAC;CACzD,CAAC;AAEF,MAAM,MAAM,kBAAkB,GAAG;IAC/B,cAAc,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,MAAM,EAAE,MAAM,CAAA;KAAE,KAAK,IAAI,CAAC;IACpD,SAAS,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,MAAM,EAAE,MAAM,CAAC;QAAC,QAAQ,EAAE,GAAG,CAAC;QAAC,gBAAgB,EAAE,OAAO,CAAA;KAAE,KAAK,IAAI,CAAC;IACzF,YAAY,CAAC,EAAE,CAAC,IAAI,EAAE,GAAG,KAAK,IAAI,CAAC;IACnC,SAAS,CAAC,EAAE,CAAC,IAAI,EAAE,GAAG,KAAK,IAAI,CAAC;IAChC,aAAa,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,WAAW,EAAE,MAAM,EAAE,CAAA;KAAE,KAAK,IAAI,CAAC;IAC1D,gBAAgB,CAAC,EAAE,MAAM,IAAI,CAAC;IAC9B,SAAS,CAAC,EAAE,MAAM,IAAI,CAAC;IACvB,kBAAkB,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,OAAO,EAAE,OAAO,CAAA;KAAE,KAAK,IAAI,CAAC;IAC1D,eAAe,CAAC,EAAE,MAAM,IAAI,CAAC;IAC7B,aAAa,CAAC,EAAE,MAAM,IAAI,CAAC;IAC3B,aAAa,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,OAAO,EAAE,OAAO,CAAA;KAAE,KAAK,IAAI,CAAC;IACrD,OAAO,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,OAAO,EAAE,MAAM,CAAC;QAAC,eAAe,EAAE,OAAO,CAAC;QAAC,IAAI,EAAE,MAAM,CAAA;KAAE,KAAK,IAAI,CAAC;IACtF,iBAAiB,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,OAAO,EAAE,MAAM,CAAA;KAAE,KAAK,IAAI,CAAC;IACxD,wBAAwB,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,MAAM,EAAE,IAAI,GAAG,KAAK,GAAG,YAAY,CAAA;KAAE,KAAK,IAAI,CAAC;IACnF,mCAAmC,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,UAAU,EAAE,MAAM,CAAA;KAAE,KAAK,IAAI,CAAC;CAC9E,CAAC;AAEF,MAAM,MAAM,aAAa,GAAG;IAC1B,YAAY,CAAC,EAAE,MAAM,CAAC;CACvB,CAAC;AAEF,MAAM,MAAM,qBAAqB,GAAG;IAClC,UAAU,EAAE,KAAK,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC;IACjC,YAAY,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;IAClC,WAAW,EAAE,OAAO,CAAC;IACrB,YAAY,EAAE,MAAM,IAAI,CAAC;IACzB,UAAU,EAAE,MAAM,IAAI,CAAC;IACvB,WAAW,EAAE,CAAC,IAAI,EAAE,MAAM,KAAK,IAAI,CAAC;IACpC,gBAAgB,EAAE,MAAM,IAAI,CAAC;IAC7B,UAAU,EAAE,MAAM,IAAI,CAAC;IACvB,WAAW,EAAE,MAAM,IAAI,CAAC;CACzB,CAAC;AAgRF,wBAAgB,eAAe,CAC7B,OAAO,EAAE,mBAAmB,EAC5B,MAAM,CAAC,EAAE,kBAAkB,EAC3B,MAAM,CAAC,EAAE,aAAa,GACrB,qBAAqB,CAEvB;AAED,MAAM,MAAM,0BAA0B,GAAG,kBAAkB,GAAG;IAC5D,OAAO,EAAE,mBAAmB,CAAC;IAC7B,KAAK,CAAC,EAAE,SAAS,CAAC;IAClB,YAAY,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;IACnC,YAAY,CAAC,EAAE,MAAM,CAAC;CACvB,CAAC;AAEF,wBAAgB,qBAAqB,CAAC,EACpC,OAAO,EACP,KAAK,EACL,YAAY,EACZ,YAAY,EACZ,GAAG,MAAM,EACV,EAAE,0BAA0B,qBAW5B"}
package/dist/lib.js
CHANGED
@@ -1,2 +1,2 @@
-
var n=require("react"),e=require("react-native-webview");function a(n){return n&&"object"==typeof n&&"default"in n?n:{default:n}}var t=/*#__PURE__*/a(n);function o(){return o=Object.assign?Object.assign.bind():function(n){for(var e=1;e<arguments.length;e++){var a=arguments[e];for(var t in a)({}).hasOwnProperty.call(a,t)&&(n[t]=a[t])}return n},o.apply(null,arguments)}var i=["options","style","webviewProps","webClientUrl"];function
+
var n=require("react"),e=require("react-native-webview");function a(n){return n&&"object"==typeof n&&"default"in n?n:{default:n}}var t=/*#__PURE__*/a(n);function o(){return o=Object.assign?Object.assign.bind():function(n){for(var e=1;e<arguments.length;e++){var a=arguments[e];for(var t in a)({}).hasOwnProperty.call(a,t)&&(n[t]=a[t])}return n},o.apply(null,arguments)}var i=["options","style","webviewProps","webClientUrl"];function s(e,a,t){return function(e,a,t){void 0===a&&(a={}),void 0===t&&(t={});var i=n.useRef(null),s=n.useState(!1),r=s[0],l=s[1],c=n.useMemo(function(){return'<!doctype html>\n<html>\n <head>\n <meta charset="utf-8" />\n <meta name="viewport" content="width=device-width, initial-scale=1" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id="root"></div>\n <script type="module">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => {\n console.log("Applying message from React Native:", msg);\n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === "INIT") {\n try {\n const { Conversation } = await import("'+(t.webClientUrl||"https://unpkg.com/@unith-ai/core-client/dist/lib.module.js")+'");\n\n if (!("VideoDecoder" in window)) {\n send("ERROR", {\n message: "WebCodecs VideoDecoder is not supported on this device.",\n type: "modal",\n endConversation: true,\n });\n return;\n }\n\n const element = document.getElementById("root");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send("STATUS_CHANGE", data),\n onConnect: (data) => send("CONNECT", data),\n onDisconnect: (data) => send("DISCONNECT", data),\n onMessage: (data) => send("MESSAGE", data),\n onSuggestions: (data) => send("SUGGESTIONS", data),\n onSpeakingStart: () => send("SPEAKING_START"),\n onSpeakingEnd: () => send("SPEAKING_END"),\n onTimeoutWarning: () => send("TIMEOUT_WARNING"),\n onTimeout: () => send("TIMEOUT"),\n onKeepSession: (data) => send("KEEP_SESSION", data),\n onMuteStatusChange: (data) => send("MUTE_STATUS", data),\n onError: (data) => send("ERROR", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send("MIC_ERROR", data),\n onMicrophoneStatusChange: (data) => send("MIC_STATUS", data),\n onMicrophoneSpeechRecognitionResult: (data) => send("MIC_TRANSCRIPT", data),\n },\n });\n\n ready = true;\n send("READY");\n\n while (queue.length > 0) {\n const next = queue.shift();\n try {\n await applyMessage(next);\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Failed to process queued message",\n type: "notification",\n endConversation: false,\n });\n }\n }\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Failed to initialize conversation",\n type: "modal",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n try {\n switch (type) {\n case "START_SESSION":\n await conversation.startSession();\n break;\n case "END_SESSION":\n await conversation.endSession();\n break;\n case "SEND_MESSAGE":\n await conversation.sendMessage(payload?.text || "");\n break;\n case "TOGGLE_MIC":\n await conversation.toggleMicrophone();\n break;\n case "TOGGLE_MUTE":\n await 
conversation.toggleMute();\n break;\n case "KEEP_SESSION":\n await conversation.keepSession();\n break;\n default:\n break;\n }\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Operation failed",\n type: "notification",\n endConversation: false,\n });\n }\n };\n\n const handleIncoming = (event) => {\n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== "INIT") {\n queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (error) {\n console.error("Failed to handle incoming message:", error);\n }\n };\n\n window.addEventListener("message", handleIncoming);\n document.addEventListener("message", handleIncoming);\n <\/script>\n </body>\n</html>'},[t.webClientUrl]),d=n.useCallback(function(n,e){var a;null==(a=i.current)||a.postMessage(JSON.stringify({type:n,payload:e}))},[]),u=n.useCallback(function(){r||(console.log("Initializing conversation bridge with options:",e),d("INIT",o({},e)),l(!0))},[r,e,d]),S=n.useCallback(function(){console.log("WebView loaded, initializing bridge..."),u()},[u]),p=n.useCallback(function(n){var e=null;try{e=JSON.parse(n.nativeEvent.data)}catch(n){return}if(e)switch(e.type){case"STATUS_CHANGE":null==a.onStatusChange||a.onStatusChange(e.payload);break;case"CONNECT":null==a.onConnect||a.onConnect(e.payload);break;case"DISCONNECT":null==a.onDisconnect||a.onDisconnect(e.payload);break;case"MESSAGE":null==a.onMessage||a.onMessage(e.payload);break;case"SUGGESTIONS":null==a.onSuggestions||a.onSuggestions(e.payload);break;case"SPEAKING_START":null==a.onSpeakingStart||a.onSpeakingStart();break;case"SPEAKING_END":null==a.onSpeakingEnd||a.onSpeakingEnd();break;case"TIMEOUT_WARNING":null==a.onTimeoutWarning||a.onTimeoutWarning();break;case"TIMEOUT":null==a.onTimeout||a.onTimeout();break;case"KEEP_SESSION":null==a.onKeepSession||a.onKeepSession(e.payload);break;case"MUTE_STATUS":null==a.onMuteStatusChange||a.onMuteStatusChange(e.payload);break;case"MIC_STATUS":null==a.onMicrophoneStatusChange||a.onMicrophoneStatusChange(e.payload);break;case"MIC_TRANSCRIPT":null==a.onMicrophoneSpeechRecognitionResult||a.onMicrophoneSpeechRecognitionResult(e.payload);break;case"MIC_ERROR":null==a.onMicrophoneError||a.onMicrophoneError(e.payload);break;case"ERROR":null==a.onError||a.onError(e.payload)}},[a]);return{webViewRef:i,webViewProps:{source:{html:c},onMessage:p,onLoadEnd:S,javaScriptEnabled:!0,mediaPlaybackRequiresUserAction:!1,allowsInlineMediaPlayback:!0,allowsFileAccess:!0,domStorageEnabled:!0,mediaCapturePermissionGrantType:"grant"},initialized:r,startSession:function(){return d("START_SESSION")},endSession:function(){return d("END_SESSION")},sendMessage:function(n){return d("SEND_MESSAGE",{text:n})},toggleMicrophone:function(){return d("TOGGLE_MIC")},toggleMute:function(){return d("TOGGLE_MUTE")},keepSession:function(){return d("KEEP_SESSION")}}}(e,a,t)}exports.UnithConversationView=function(n){var a=n.style,r=n.webviewProps,l=n.webClientUrl,c=s(n.options,function(n,e){if(null==n)return{};var a={};for(var t in n)if({}.hasOwnProperty.call(n,t)){if(-1!==e.indexOf(t))continue;a[t]=n[t]}return a}(n,i),{webClientUrl:l});/*#__PURE__*/return t.default.createElement(e.WebView,o({ref:c.webViewRef},c.webViewProps,r,{style:a}))},exports.useConversation=s;
//# sourceMappingURL=lib.js.map
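Both bundles implement the same WebView bridge visible in the sourcemaps: each side exchanges JSON envelopes of the shape { type, payload }, and the embedded page queues every message other than INIT until the @unith-ai/core-client conversation has started. A sketch of driving that bridge through the hook, assuming placeholder options; the component name is hypothetical:

import React from "react";
import { WebView } from "react-native-webview";
import { useConversation } from "@unith-ai/react-native";

export function HookDrivenScreen() {
  // First argument is ConversationOptions (placeholders here);
  // second is the optional ConversationEvents callbacks.
  const convo = useConversation(
    { orgId: "YOUR_ORG_ID", headId: "YOUR_HEAD_ID", apiKey: "YOUR_API_KEY" },
    { onStatusChange: ({ status }) => console.log("status:", status) }
  );

  // convo.webViewProps supplies the generated HTML source, onMessage, and
  // onLoadEnd handlers; imperative methods such as convo.sendMessage("Hi")
  // post a { type: "SEND_MESSAGE", payload: { text: "Hi" } } envelope.
  return <WebView ref={convo.webViewRef} {...convo.webViewProps} style={{ flex: 1 }} />;
}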
package/dist/lib.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"lib.js","sources":["../src/index.tsx"],"sourcesContent":["import React, { useCallback, useMemo, useRef, useState } from \"react\";\nimport { ViewStyle } from \"react-native\";\nimport { WebView, WebViewMessageEvent } from \"react-native-webview\";\n\nexport type ConversationOptions = {\n orgId: string;\n headId: string;\n apiKey: string;\n environment?: string;\n mode?: string;\n language?: string;\n username?: string;\n allowWakeLock?: boolean;\n fadeTransitionsType?: string;\n microphoneProvider?: \"azure\" | \"eleven_labs\" | \"custom\";\n};\n\nexport type ConversationEvents = {\n onStatusChange?: (prop: { status: string }) => void;\n onConnect?: (prop: { userId: string; headInfo: any; microphoneAccess: boolean }) => void;\n onDisconnect?: (prop: any) => void;\n onMessage?: (prop: any) => void;\n onSuggestions?: (prop: { suggestions: string[] }) => void;\n onTimeoutWarning?: () => void;\n onTimeout?: () => void;\n onMuteStatusChange?: (prop: { isMuted: boolean }) => void;\n onSpeakingStart?: () => void;\n onSpeakingEnd?: () => void;\n onKeepSession?: (prop: { granted: boolean }) => void;\n onError?: (prop: { message: string; endConversation: boolean; type: string }) => void;\n onMicrophoneError?: (prop: { message: string }) => void;\n onMicrophoneStatusChange?: (prop: { status: \"ON\" | \"OFF\" | \"PROCESSING\" }) => void;\n onMicrophoneSpeechRecognitionResult?: (prop: { transcript: string }) => void;\n};\n\nexport type BridgeOptions = {\n webClientUrl?: string;\n};\n\nexport type UseConversationResult = {\n webViewRef: React.RefObject<any>;\n webViewProps: Record<string, any>;\n initialized: boolean;\n startSession: () => void;\n endSession: () => void;\n sendMessage: (text: string) => void;\n toggleMicrophone: () => void;\n toggleMute: () => void;\n keepSession: () => void;\n};\n\ntype BridgeMessage = {\n type: string;\n payload?: any;\n};\n\nconst DEFAULT_WEB_CLIENT_URL =\n \"https://unpkg.com/@unith-ai/core-client/dist/lib.module.js\";\n\nfunction buildHtml(webClientUrl: string) {\n return `<!doctype html>\n<html>\n <head>\n <meta charset=\"utf-8\" />\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id=\"root\"></div>\n <script type=\"module\">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => {\n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === \"INIT\") {\n try {\n const { Conversation } = await import(\"${webClientUrl}\");\n\n if (!(\"VideoDecoder\" in window)) {\n send(\"ERROR\", {\n message: \"WebCodecs VideoDecoder is not supported on this device.\",\n type: \"modal\",\n endConversation: true,\n });\n return;\n }\n console.log(\"Initializing conversation with payload:\", payload);\n const element = document.getElementById(\"root\");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send(\"STATUS_CHANGE\", data),\n onConnect: (data) => send(\"CONNECT\", data),\n onDisconnect: (data) => send(\"DISCONNECT\", data),\n onMessage: (data) => send(\"MESSAGE\", data),\n onSuggestions: (data) => send(\"SUGGESTIONS\", data),\n 
onSpeakingStart: () => send(\"SPEAKING_START\"),\n onSpeakingEnd: () => send(\"SPEAKING_END\"),\n onTimeoutWarning: () => send(\"TIMEOUT_WARNING\"),\n onTimeout: () => send(\"TIMEOUT\"),\n onKeepSession: (data) => send(\"KEEP_SESSION\", data),\n onMuteStatusChange: (data) => send(\"MUTE_STATUS\", data),\n onError: (data) => send(\"ERROR\", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send(\"MIC_ERROR\", data),\n onMicrophoneStatusChange: (data) => send(\"MIC_STATUS\", data),\n onMicrophoneSpeechRecognitionResult: (data) => send(\"MIC_TRANSCRIPT\", data),\n },\n });\n\n ready = true;\n send(\"READY\");\n\n while (queue.length > 0) {\n const next = queue.shift();\n try {\n await applyMessage(next);\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to process queued message\",\n type: \"notification\",\n endConversation: false,\n });\n }\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to initialize conversation\",\n type: \"modal\",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n try {\n switch (type) {\n case \"START_SESSION\":\n await conversation.startSession();\n break;\n case \"END_SESSION\":\n await conversation.endSession();\n break;\n case \"SEND_MESSAGE\":\n await conversation.sendMessage(payload?.text || \"\");\n break;\n case \"TOGGLE_MIC\":\n await conversation.toggleMicrophone();\n break;\n case \"TOGGLE_MUTE\":\n await conversation.toggleMute();\n break;\n case \"KEEP_SESSION\":\n await conversation.keepSession();\n break;\n default:\n break;\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Operation failed\",\n type: \"notification\",\n endConversation: false,\n });\n }\n };\n\n const handleIncoming = (event) => {\n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== \"INIT\") {\n queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (error) {\n console.error(\"Failed to handle incoming message:\", error);\n }\n };\n\n window.addEventListener(\"message\", handleIncoming);\n document.addEventListener(\"message\", handleIncoming);\n </script>\n </body>\n</html>`;\n}\n\nfunction useBridge(\n options: ConversationOptions,\n events: ConversationEvents = {},\n bridge: BridgeOptions = {}\n): UseConversationResult {\n const webViewRef = useRef<any>(null);\n const [initialized, setInitialized] = useState(false);\n\n const html = useMemo(\n () => buildHtml(bridge.webClientUrl || DEFAULT_WEB_CLIENT_URL),\n [bridge.webClientUrl]\n );\n\n const post = useCallback((type: string, payload?: any) => {\n const message: BridgeMessage = { type, payload };\n webViewRef.current?.postMessage(JSON.stringify(message));\n }, []);\n\n const init = useCallback(() => {\n if (initialized) return;\n console.log(\"Initializing conversation bridge with options:\", options);\n post(\"INIT\", {\n ...options,\n });\n setInitialized(true);\n }, [initialized, options, post]);\n\n const onMessage = useCallback(\n (event: WebViewMessageEvent) => {\n let data: BridgeMessage | null = null;\n try {\n data = JSON.parse(event.nativeEvent.data);\n } catch (_) {\n return;\n }\n if (!data) return;\n\n switch (data.type) {\n case \"STATUS_CHANGE\":\n events.onStatusChange?.(data.payload);\n break;\n case \"CONNECT\":\n events.onConnect?.(data.payload);\n break;\n case \"DISCONNECT\":\n events.onDisconnect?.(data.payload);\n break;\n case \"MESSAGE\":\n events.onMessage?.(data.payload);\n break;\n case \"SUGGESTIONS\":\n 
events.onSuggestions?.(data.payload);\n break;\n case \"SPEAKING_START\":\n events.onSpeakingStart?.();\n break;\n case \"SPEAKING_END\":\n events.onSpeakingEnd?.();\n break;\n case \"TIMEOUT_WARNING\":\n events.onTimeoutWarning?.();\n break;\n case \"TIMEOUT\":\n events.onTimeout?.();\n break;\n case \"KEEP_SESSION\":\n events.onKeepSession?.(data.payload);\n break;\n case \"MUTE_STATUS\":\n events.onMuteStatusChange?.(data.payload);\n break;\n case \"MIC_STATUS\":\n events.onMicrophoneStatusChange?.(data.payload);\n break;\n case \"MIC_TRANSCRIPT\":\n events.onMicrophoneSpeechRecognitionResult?.(data.payload);\n break;\n case \"MIC_ERROR\":\n events.onMicrophoneError?.(data.payload);\n break;\n case \"ERROR\":\n events.onError?.(data.payload);\n break;\n default:\n break;\n }\n },\n [events]\n );\n\n return {\n webViewRef,\n webViewProps: {\n source: { html },\n onMessage,\n onLoadEnd: () => {\n console.log(\"WebView loaded, initializing bridge...\");\n init();\n },\n javaScriptEnabled: true,\n mediaPlaybackRequiresUserAction: false,\n allowsInlineMediaPlayback: true,\n allowsFileAccess: true,\n domStorageEnabled: true,\n mediaCapturePermissionGrantType: 'grant',\n },\n initialized,\n startSession: () => post(\"START_SESSION\"),\n endSession: () => post(\"END_SESSION\"),\n sendMessage: (text: string) => post(\"SEND_MESSAGE\", { text }),\n toggleMicrophone: () => post(\"TOGGLE_MIC\"),\n toggleMute: () => post(\"TOGGLE_MUTE\"),\n keepSession: () => post(\"KEEP_SESSION\"),\n };\n}\n\nexport function useConversation(\n options: ConversationOptions,\n events?: ConversationEvents,\n bridge?: BridgeOptions\n): UseConversationResult {\n return useBridge(options, events, bridge);\n}\n\nexport type UnithConversationViewProps = ConversationEvents & {\n options: ConversationOptions;\n style?: ViewStyle;\n webviewProps?: Record<string, any>;\n webClientUrl?: string;\n};\n\nexport function UnithConversationView({\n options,\n style,\n webviewProps,\n webClientUrl,\n ...events\n}: UnithConversationViewProps) {\n const convo = useConversation(options, events, { webClientUrl });\n\n return (\n <WebView\n ref={convo.webViewRef}\n {...convo.webViewProps}\n {...webviewProps}\n style={style}\n />\n 
);\n}"],"names":["useConversation","options","events","bridge","webViewRef","useRef","_useState","useState","initialized","setInitialized","html","useMemo","webClientUrl","post","useCallback","type","payload","_webViewRef$current","current","postMessage","JSON","stringify","init","console","log","_extends","onMessage","event","data","parse","nativeEvent","_","onStatusChange","onConnect","onDisconnect","onSuggestions","onSpeakingStart","onSpeakingEnd","onTimeoutWarning","onTimeout","onKeepSession","onMuteStatusChange","onMicrophoneStatusChange","onMicrophoneSpeechRecognitionResult","onMicrophoneError","onError","webViewProps","source","onLoadEnd","javaScriptEnabled","mediaPlaybackRequiresUserAction","allowsInlineMediaPlayback","allowsFileAccess","domStorageEnabled","mediaCapturePermissionGrantType","startSession","endSession","sendMessage","text","toggleMicrophone","toggleMute","keepSession","useBridge","_ref","style","webviewProps","convo","_objectWithoutPropertiesLoose","_excluded","React","WebView","ref"],"mappings":"kbA8TgBA,EACdC,EACAC,EACAC,GAEA,OAzHF,SACEF,EACAC,EACAC,QADAD,IAAAA,IAAAA,EAA6B,CAAE,YAC/BC,IAAAA,EAAwB,IAExB,IAAMC,EAAaC,EAAAA,OAAY,MAC/BC,EAAsCC,YAAS,GAAxCC,EAAWF,EAAEG,GAAAA,EAAcH,EAAA,GAE5BI,EAAOC,EAAAA,QACX,u6BAAgBR,EAAOS,cA1JzB,8DAgC+D,o1HA0HC,EAC9D,CAACT,EAAOS,eAGJC,EAAOC,EAAAA,YAAY,SAACC,EAAcC,GAAiB,IAAAC,EAEvDA,OAAAA,EAAAb,EAAWc,UAAXD,EAAoBE,YAAYC,KAAKC,UADN,CAAEN,KAAAA,EAAMC,QAAAA,IAEzC,EAAG,IAEGM,EAAOR,EAAAA,YAAY,WACnBN,IACJe,QAAQC,IAAI,iDAAkDvB,GAC9DY,EAAK,OAAMY,EAAA,CAAA,EACNxB,IAELQ,GAAe,GACjB,EAAG,CAACD,EAAaP,EAASY,IAEpBa,EAAYZ,EAAWA,YAC3B,SAACa,GACC,IAAIC,EAA6B,KACjC,IACEA,EAAOR,KAAKS,MAAMF,EAAMG,YAAYF,KACtC,CAAE,MAAOG,GACP,MACF,CACA,GAAKH,EAEL,OAAQA,EAAKb,MACX,IAAK,gBACkB,MAArBb,EAAO8B,gBAAP9B,EAAO8B,eAAiBJ,EAAKZ,SAC7B,MACF,IAAK,UACHd,MAAAA,EAAO+B,WAAP/B,EAAO+B,UAAYL,EAAKZ,SACxB,MACF,IAAK,mBACHd,EAAOgC,cAAPhC,EAAOgC,aAAeN,EAAKZ,SAC3B,MACF,IAAK,UACa,MAAhBd,EAAOwB,WAAPxB,EAAOwB,UAAYE,EAAKZ,SACxB,MACF,IAAK,cACHd,MAAAA,EAAOiC,eAAPjC,EAAOiC,cAAgBP,EAAKZ,SAC5B,MACF,IAAK,uBACHd,EAAOkC,iBAAPlC,EAAOkC,kBACP,MACF,IAAK,eACiB,MAApBlC,EAAOmC,eAAPnC,EAAOmC,gBACP,MACF,IAAK,kBACHnC,MAAAA,EAAOoC,kBAAPpC,EAAOoC,mBACP,MACF,IAAK,UACa,MAAhBpC,EAAOqC,WAAPrC,EAAOqC,YACP,MACF,IAAK,eACHrC,MAAAA,EAAOsC,eAAPtC,EAAOsC,cAAgBZ,EAAKZ,SAC5B,MACF,IAAK,oBACHd,EAAOuC,oBAAPvC,EAAOuC,mBAAqBb,EAAKZ,SACjC,MACF,IAAK,aAC4B,MAA/Bd,EAAOwC,0BAAPxC,EAAOwC,yBAA2Bd,EAAKZ,SACvC,MACF,IAAK,iBACHd,MAAAA,EAAOyC,qCAAPzC,EAAOyC,oCAAsCf,EAAKZ,SAClD,MACF,IAAK,kBACHd,EAAO0C,mBAAP1C,EAAO0C,kBAAoBhB,EAAKZ,SAChC,MACF,IAAK,QACW,MAAdd,EAAO2C,SAAP3C,EAAO2C,QAAUjB,EAAKZ,SAK5B,EACA,CAACd,IAGH,MAAO,CACLE,WAAAA,EACA0C,aAAc,CACZC,OAAQ,CAAErC,KAAAA,GACVgB,UAAAA,EACAsB,UAAW,WACTzB,QAAQC,IAAI,0CACZF,GACF,EACA2B,mBAAmB,EACnBC,iCAAiC,EACjCC,2BAA2B,EAC3BC,kBAAkB,EAClBC,mBAAmB,EACnBC,gCAAiC,SAEnC9C,YAAAA,EACA+C,aAAc,WAAF,OAAQ1C,EAAK,gBAAgB,EACzC2C,WAAY,kBAAM3C,EAAK,cAAc,EACrC4C,YAAa,SAACC,GAAY,OAAK7C,EAAK,eAAgB,CAAE6C,KAAAA,GAAO,EAC7DC,iBAAkB,kBAAM9C,EAAK,aAAa,EAC1C+C,WAAY,WAAM,OAAA/C,EAAK,cAAc,EACrCgD,YAAa,WAAF,OAAQhD,EAAK,eAAe,EAE3C,CAOSiD,CAAU7D,EAASC,EAAQC,EACpC,+BASgB,SAAqB4D,GACnC,IACAC,EAAKD,EAALC,MACAC,EAAYF,EAAZE,aACArD,EAAYmD,EAAZnD,aAGMsD,EAAQlE,EANP+D,EAAP9D,mJAISkE,CAAAJ,EAAAK,GAEsC,CAAExD,aAAAA,iBAEjD,OACEyD,wBAACC,UAAO7C,GACN8C,IAAKL,EAAM9D,YACP8D,EAAMpB,aACNmB,GACJD,MAAOA,IAGb"}
+
{"version":3,"file":"lib.js","sources":["../src/index.tsx"],"sourcesContent":["import React, { useCallback, useMemo, useRef, useState } from \"react\";\nimport { ViewStyle } from \"react-native\";\nimport { WebView, WebViewMessageEvent } from \"react-native-webview\";\n\nexport type ConversationOptions = {\n orgId: string;\n headId: string;\n apiKey: string;\n environment?: string;\n mode?: string;\n language?: string;\n username?: string;\n allowWakeLock?: boolean;\n fadeTransitionsType?: string;\n microphoneProvider?: \"azure\" | \"eleven_labs\" | \"custom\";\n};\n\nexport type ConversationEvents = {\n onStatusChange?: (prop: { status: string }) => void;\n onConnect?: (prop: { userId: string; headInfo: any; microphoneAccess: boolean }) => void;\n onDisconnect?: (prop: any) => void;\n onMessage?: (prop: any) => void;\n onSuggestions?: (prop: { suggestions: string[] }) => void;\n onTimeoutWarning?: () => void;\n onTimeout?: () => void;\n onMuteStatusChange?: (prop: { isMuted: boolean }) => void;\n onSpeakingStart?: () => void;\n onSpeakingEnd?: () => void;\n onKeepSession?: (prop: { granted: boolean }) => void;\n onError?: (prop: { message: string; endConversation: boolean; type: string }) => void;\n onMicrophoneError?: (prop: { message: string }) => void;\n onMicrophoneStatusChange?: (prop: { status: \"ON\" | \"OFF\" | \"PROCESSING\" }) => void;\n onMicrophoneSpeechRecognitionResult?: (prop: { transcript: string }) => void;\n};\n\nexport type BridgeOptions = {\n webClientUrl?: string;\n};\n\nexport type UseConversationResult = {\n webViewRef: React.RefObject<any>;\n webViewProps: Record<string, any>;\n initialized: boolean;\n startSession: () => void;\n endSession: () => void;\n sendMessage: (text: string) => void;\n toggleMicrophone: () => void;\n toggleMute: () => void;\n keepSession: () => void;\n};\n\ntype BridgeMessage = {\n type: string;\n payload?: any;\n};\n\nconst DEFAULT_WEB_CLIENT_URL =\n \"https://unpkg.com/@unith-ai/core-client/dist/lib.module.js\";\n\nfunction buildHtml(webClientUrl: string) {\n return `<!doctype html>\n<html>\n <head>\n <meta charset=\"utf-8\" />\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id=\"root\"></div>\n <script type=\"module\">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => {\n console.log(\"Applying message from React Native:\", msg);\n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === \"INIT\") {\n try {\n const { Conversation } = await import(\"${webClientUrl}\");\n\n if (!(\"VideoDecoder\" in window)) {\n send(\"ERROR\", {\n message: \"WebCodecs VideoDecoder is not supported on this device.\",\n type: \"modal\",\n endConversation: true,\n });\n return;\n }\n\n const element = document.getElementById(\"root\");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send(\"STATUS_CHANGE\", data),\n onConnect: (data) => send(\"CONNECT\", data),\n onDisconnect: (data) => send(\"DISCONNECT\", data),\n onMessage: (data) => send(\"MESSAGE\", data),\n onSuggestions: (data) => send(\"SUGGESTIONS\", data),\n onSpeakingStart: 
() => send(\"SPEAKING_START\"),\n onSpeakingEnd: () => send(\"SPEAKING_END\"),\n onTimeoutWarning: () => send(\"TIMEOUT_WARNING\"),\n onTimeout: () => send(\"TIMEOUT\"),\n onKeepSession: (data) => send(\"KEEP_SESSION\", data),\n onMuteStatusChange: (data) => send(\"MUTE_STATUS\", data),\n onError: (data) => send(\"ERROR\", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send(\"MIC_ERROR\", data),\n onMicrophoneStatusChange: (data) => send(\"MIC_STATUS\", data),\n onMicrophoneSpeechRecognitionResult: (data) => send(\"MIC_TRANSCRIPT\", data),\n },\n });\n\n ready = true;\n send(\"READY\");\n\n while (queue.length > 0) {\n const next = queue.shift();\n try {\n await applyMessage(next);\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to process queued message\",\n type: \"notification\",\n endConversation: false,\n });\n }\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to initialize conversation\",\n type: \"modal\",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n try {\n switch (type) {\n case \"START_SESSION\":\n await conversation.startSession();\n break;\n case \"END_SESSION\":\n await conversation.endSession();\n break;\n case \"SEND_MESSAGE\":\n await conversation.sendMessage(payload?.text || \"\");\n break;\n case \"TOGGLE_MIC\":\n await conversation.toggleMicrophone();\n break;\n case \"TOGGLE_MUTE\":\n await conversation.toggleMute();\n break;\n case \"KEEP_SESSION\":\n await conversation.keepSession();\n break;\n default:\n break;\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Operation failed\",\n type: \"notification\",\n endConversation: false,\n });\n }\n };\n\n const handleIncoming = (event) => {\n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== \"INIT\") {\n queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (error) {\n console.error(\"Failed to handle incoming message:\", error);\n }\n };\n\n window.addEventListener(\"message\", handleIncoming);\n document.addEventListener(\"message\", handleIncoming);\n </script>\n </body>\n</html>`;\n}\n\nfunction useBridge(\n options: ConversationOptions,\n events: ConversationEvents = {},\n bridge: BridgeOptions = {}\n): UseConversationResult {\n const webViewRef = useRef<any>(null);\n const [initialized, setInitialized] = useState(false);\n\n const html = useMemo(\n () => buildHtml(bridge.webClientUrl || DEFAULT_WEB_CLIENT_URL),\n [bridge.webClientUrl]\n );\n\n const post = useCallback((type: string, payload?: any) => {\n const message: BridgeMessage = { type, payload };\n webViewRef.current?.postMessage(JSON.stringify(message));\n }, []);\n\n const init = useCallback(() => {\n if (initialized) return;\n console.log(\"Initializing conversation bridge with options:\", options);\n post(\"INIT\", {\n ...options,\n });\n setInitialized(true);\n }, [initialized, options, post]);\n\n const handleLoadEnd = useCallback(() => {\n console.log(\"WebView loaded, initializing bridge...\");\n init();\n }, [init]);\n\n const onMessage = useCallback(\n (event: WebViewMessageEvent) => {\n let data: BridgeMessage | null = null;\n try {\n data = JSON.parse(event.nativeEvent.data);\n } catch (_) {\n return;\n }\n if (!data) return;\n\n switch (data.type) {\n case \"STATUS_CHANGE\":\n events.onStatusChange?.(data.payload);\n break;\n case \"CONNECT\":\n events.onConnect?.(data.payload);\n break;\n case \"DISCONNECT\":\n events.onDisconnect?.(data.payload);\n break;\n case 
\"MESSAGE\":\n events.onMessage?.(data.payload);\n break;\n case \"SUGGESTIONS\":\n events.onSuggestions?.(data.payload);\n break;\n case \"SPEAKING_START\":\n events.onSpeakingStart?.();\n break;\n case \"SPEAKING_END\":\n events.onSpeakingEnd?.();\n break;\n case \"TIMEOUT_WARNING\":\n events.onTimeoutWarning?.();\n break;\n case \"TIMEOUT\":\n events.onTimeout?.();\n break;\n case \"KEEP_SESSION\":\n events.onKeepSession?.(data.payload);\n break;\n case \"MUTE_STATUS\":\n events.onMuteStatusChange?.(data.payload);\n break;\n case \"MIC_STATUS\":\n events.onMicrophoneStatusChange?.(data.payload);\n break;\n case \"MIC_TRANSCRIPT\":\n events.onMicrophoneSpeechRecognitionResult?.(data.payload);\n break;\n case \"MIC_ERROR\":\n events.onMicrophoneError?.(data.payload);\n break;\n case \"ERROR\":\n events.onError?.(data.payload);\n break;\n default:\n break;\n }\n },\n [events]\n );\n\n return {\n webViewRef,\n webViewProps: {\n source: { html },\n onMessage,\n onLoadEnd: handleLoadEnd,\n javaScriptEnabled: true,\n mediaPlaybackRequiresUserAction: false,\n allowsInlineMediaPlayback: true,\n allowsFileAccess: true,\n domStorageEnabled: true,\n mediaCapturePermissionGrantType: 'grant',\n },\n initialized,\n startSession: () => post(\"START_SESSION\"),\n endSession: () => post(\"END_SESSION\"),\n sendMessage: (text: string) => post(\"SEND_MESSAGE\", { text }),\n toggleMicrophone: () => post(\"TOGGLE_MIC\"),\n toggleMute: () => post(\"TOGGLE_MUTE\"),\n keepSession: () => post(\"KEEP_SESSION\"),\n };\n}\n\nexport function useConversation(\n options: ConversationOptions,\n events?: ConversationEvents,\n bridge?: BridgeOptions\n): UseConversationResult {\n return useBridge(options, events, bridge);\n}\n\nexport type UnithConversationViewProps = ConversationEvents & {\n options: ConversationOptions;\n style?: ViewStyle;\n webviewProps?: Record<string, any>;\n webClientUrl?: string;\n};\n\nexport function UnithConversationView({\n options,\n style,\n webviewProps,\n webClientUrl,\n ...events\n}: UnithConversationViewProps) {\n const convo = useConversation(options, events, { webClientUrl });\n\n return (\n <WebView\n ref={convo.webViewRef}\n {...convo.webViewProps}\n {...webviewProps}\n style={style}\n />\n 
);\n}"],"names":["useConversation","options","events","bridge","webViewRef","useRef","_useState","useState","initialized","setInitialized","html","useMemo","webClientUrl","post","useCallback","type","payload","_webViewRef$current","current","postMessage","JSON","stringify","init","console","log","_extends","handleLoadEnd","onMessage","event","data","parse","nativeEvent","_","onStatusChange","onConnect","onDisconnect","onSuggestions","onSpeakingStart","onSpeakingEnd","onTimeoutWarning","onTimeout","onKeepSession","onMuteStatusChange","onMicrophoneStatusChange","onMicrophoneSpeechRecognitionResult","onMicrophoneError","onError","webViewProps","source","onLoadEnd","javaScriptEnabled","mediaPlaybackRequiresUserAction","allowsInlineMediaPlayback","allowsFileAccess","domStorageEnabled","mediaCapturePermissionGrantType","startSession","endSession","sendMessage","text","toggleMicrophone","toggleMute","keepSession","useBridge","_ref","style","webviewProps","convo","_objectWithoutPropertiesLoose","_excluded","React","WebView","ref"],"mappings":"yaAiUgB,SAAAA,EACdC,EACAC,EACAC,GAEA,OA3HF,SACEF,EACAC,EACAC,QADAD,IAAAA,IAAAA,EAA6B,CAAE,YAC/BC,IAAAA,EAAwB,IAExB,IAAMC,EAAaC,EAAAA,OAAY,MAC/BC,EAAsCC,YAAS,GAAxCC,EAAWF,EAAEG,GAAAA,EAAcH,EAAA,GAE5BI,EAAOC,EAAOA,QAClB,WAAM,89BAAUR,EAAOS,cA3JzB,8DAiC+D,wwHA0HC,EAC9D,CAACT,EAAOS,eAGJC,EAAOC,EAAWA,YAAC,SAACC,EAAcC,GAAiBC,IAAAA,EAErC,OAAlBA,EAAAb,EAAWc,UAAXD,EAAoBE,YAAYC,KAAKC,UADN,CAAEN,KAAAA,EAAMC,QAAAA,IAEzC,EAAG,IAEGM,EAAOR,EAAWA,YAAC,WACnBN,IACJe,QAAQC,IAAI,iDAAkDvB,GAC9DY,EAAK,OAAMY,KACNxB,IAELQ,GAAe,GACjB,EAAG,CAACD,EAAaP,EAASY,IAEpBa,EAAgBZ,EAAWA,YAAC,WAChCS,QAAQC,IAAI,0CACZF,GACF,EAAG,CAACA,IAEEK,EAAYb,EAAAA,YAChB,SAACc,GACC,IAAIC,EAA6B,KACjC,IACEA,EAAOT,KAAKU,MAAMF,EAAMG,YAAYF,KACtC,CAAE,MAAOG,GACP,MACF,CACA,GAAKH,EAEL,OAAQA,EAAKd,MACX,IAAK,sBACHb,EAAO+B,gBAAP/B,EAAO+B,eAAiBJ,EAAKb,SAC7B,MACF,IAAK,UACHd,MAAAA,EAAOgC,WAAPhC,EAAOgC,UAAYL,EAAKb,SACxB,MACF,IAAK,mBACHd,EAAOiC,cAAPjC,EAAOiC,aAAeN,EAAKb,SAC3B,MACF,IAAK,UACa,MAAhBd,EAAOyB,WAAPzB,EAAOyB,UAAYE,EAAKb,SACxB,MACF,IAAK,oBACHd,EAAOkC,eAAPlC,EAAOkC,cAAgBP,EAAKb,SAC5B,MACF,IAAK,iBACmB,MAAtBd,EAAOmC,iBAAPnC,EAAOmC,kBACP,MACF,IAAK,eACHnC,MAAAA,EAAOoC,eAAPpC,EAAOoC,gBACP,MACF,IAAK,kBACoB,MAAvBpC,EAAOqC,kBAAPrC,EAAOqC,mBACP,MACF,IAAK,UACHrC,MAAAA,EAAOsC,WAAPtC,EAAOsC,YACP,MACF,IAAK,qBACHtC,EAAOuC,eAAPvC,EAAOuC,cAAgBZ,EAAKb,SAC5B,MACF,IAAK,cACsB,MAAzBd,EAAOwC,oBAAPxC,EAAOwC,mBAAqBb,EAAKb,SACjC,MACF,IAAK,aACHd,MAAAA,EAAOyC,0BAAPzC,EAAOyC,yBAA2Bd,EAAKb,SACvC,MACF,IAAK,uBACHd,EAAO0C,qCAAP1C,EAAO0C,oCAAsCf,EAAKb,SAClD,MACF,IAAK,YACHd,MAAAA,EAAO2C,mBAAP3C,EAAO2C,kBAAoBhB,EAAKb,SAChC,MACF,IAAK,cACHd,EAAO4C,SAAP5C,EAAO4C,QAAUjB,EAAKb,SAK5B,EACA,CAACd,IAGH,MAAO,CACLE,WAAAA,EACA2C,aAAc,CACZC,OAAQ,CAAEtC,KAAAA,GACViB,UAAAA,EACAsB,UAAWvB,EACXwB,mBAAmB,EACnBC,iCAAiC,EACjCC,2BAA2B,EAC3BC,kBAAkB,EAClBC,mBAAmB,EACnBC,gCAAiC,SAEnC/C,YAAAA,EACAgD,aAAc,WAAM,OAAA3C,EAAK,gBAAgB,EACzC4C,WAAY,WAAF,OAAQ5C,EAAK,cAAc,EACrC6C,YAAa,SAACC,GAAiB,OAAA9C,EAAK,eAAgB,CAAE8C,KAAAA,GAAO,EAC7DC,iBAAkB,WAAM,OAAA/C,EAAK,aAAa,EAC1CgD,WAAY,WAAF,OAAQhD,EAAK,cAAc,EACrCiD,YAAa,kBAAMjD,EAAK,eAAe,EAE3C,CAOSkD,CAAU9D,EAASC,EAAQC,EACpC,+BASgB,SAAqB6D,GAMR,IAJ3BC,EAAKD,EAALC,MACAC,EAAYF,EAAZE,aACAtD,EAAYoD,EAAZpD,aAGMuD,EAAQnE,EANPgE,EAAP/D,mJAISmE,CAAAJ,EAAAK,GAEsC,CAAEzD,aAAAA,iBAEjD,OACE0D,wBAACC,UAAO9C,GACN+C,IAAKL,EAAM/D,YACP+D,EAAMpB,aACNmB,GACJD,MAAOA,IAGb"}
package/dist/lib.modern.mjs
CHANGED
@@ -1,2 +1,2 @@
-
import n,{useRef as e,useState as a,useMemo as t,useCallback as o}from"react";import{WebView as s}from"react-native-webview";function i(){return i=Object.assign?Object.assign.bind():function(n){for(var e=1;e<arguments.length;e++){var a=arguments[e];for(var t in a)({}).hasOwnProperty.call(a,t)&&(n[t]=a[t])}return n},i.apply(null,arguments)}const r=["options","style","webviewProps","webClientUrl"];function l(n,s,r){return function(n,s={},r={}){const l=e(null),[d,c]=a(!1),S=t(()=>`<!doctype html>\n<html>\n <head>\n <meta charset="utf-8" />\n <meta name="viewport" content="width=device-width, initial-scale=1" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id="root"></div>\n <script type="module">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => {\n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === "INIT") {\n try {\n const { Conversation } = await import("${r.webClientUrl||"https://unpkg.com/@unith-ai/core-client/dist/lib.module.js"}");\n\n if (!("VideoDecoder" in window)) {\n send("ERROR", {\n message: "WebCodecs VideoDecoder is not supported on this device.",\n type: "modal",\n endConversation: true,\n });\n return;\n }\n
+
import n,{useRef as e,useState as a,useMemo as t,useCallback as o}from"react";import{WebView as s}from"react-native-webview";function i(){return i=Object.assign?Object.assign.bind():function(n){for(var e=1;e<arguments.length;e++){var a=arguments[e];for(var t in a)({}).hasOwnProperty.call(a,t)&&(n[t]=a[t])}return n},i.apply(null,arguments)}const r=["options","style","webviewProps","webClientUrl"];function l(n,s,r){return function(n,s={},r={}){const l=e(null),[d,c]=a(!1),S=t(()=>`<!doctype html>\n<html>\n <head>\n <meta charset="utf-8" />\n <meta name="viewport" content="width=device-width, initial-scale=1" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id="root"></div>\n <script type="module">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => {\n console.log("Applying message from React Native:", msg);\n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === "INIT") {\n try {\n const { Conversation } = await import("${r.webClientUrl||"https://unpkg.com/@unith-ai/core-client/dist/lib.module.js"}");\n\n if (!("VideoDecoder" in window)) {\n send("ERROR", {\n message: "WebCodecs VideoDecoder is not supported on this device.",\n type: "modal",\n endConversation: true,\n });\n return;\n }\n\n const element = document.getElementById("root");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send("STATUS_CHANGE", data),\n onConnect: (data) => send("CONNECT", data),\n onDisconnect: (data) => send("DISCONNECT", data),\n onMessage: (data) => send("MESSAGE", data),\n onSuggestions: (data) => send("SUGGESTIONS", data),\n onSpeakingStart: () => send("SPEAKING_START"),\n onSpeakingEnd: () => send("SPEAKING_END"),\n onTimeoutWarning: () => send("TIMEOUT_WARNING"),\n onTimeout: () => send("TIMEOUT"),\n onKeepSession: (data) => send("KEEP_SESSION", data),\n onMuteStatusChange: (data) => send("MUTE_STATUS", data),\n onError: (data) => send("ERROR", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send("MIC_ERROR", data),\n onMicrophoneStatusChange: (data) => send("MIC_STATUS", data),\n onMicrophoneSpeechRecognitionResult: (data) => send("MIC_TRANSCRIPT", data),\n },\n });\n\n ready = true;\n send("READY");\n\n while (queue.length > 0) {\n const next = queue.shift();\n try {\n await applyMessage(next);\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Failed to process queued message",\n type: "notification",\n endConversation: false,\n });\n }\n }\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Failed to initialize conversation",\n type: "modal",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n try {\n switch (type) {\n case "START_SESSION":\n await conversation.startSession();\n break;\n case "END_SESSION":\n await conversation.endSession();\n break;\n case "SEND_MESSAGE":\n await conversation.sendMessage(payload?.text || "");\n break;\n case "TOGGLE_MIC":\n await conversation.toggleMicrophone();\n break;\n case "TOGGLE_MUTE":\n await conversation.toggleMute();\n break;\n case "KEEP_SESSION":\n await conversation.keepSession();\n break;\n default:\n break;\n }\n 
} catch (error) {\n send("ERROR", {\n message: error?.message || "Operation failed",\n type: "notification",\n endConversation: false,\n });\n }\n };\n\n const handleIncoming = (event) => {\n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== "INIT") {\n queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (error) {\n console.error("Failed to handle incoming message:", error);\n }\n };\n\n window.addEventListener("message", handleIncoming);\n document.addEventListener("message", handleIncoming);\n <\/script>\n </body>\n</html>`,[r.webClientUrl]),p=o((n,e)=>{var a;null==(a=l.current)||a.postMessage(JSON.stringify({type:n,payload:e}))},[]),u=o(()=>{d||(console.log("Initializing conversation bridge with options:",n),p("INIT",i({},n)),c(!0))},[d,n,p]),g=o(()=>{console.log("WebView loaded, initializing bridge..."),u()},[u]),E=o(n=>{let e=null;try{e=JSON.parse(n.nativeEvent.data)}catch(n){return}if(e)switch(e.type){case"STATUS_CHANGE":null==s.onStatusChange||s.onStatusChange(e.payload);break;case"CONNECT":null==s.onConnect||s.onConnect(e.payload);break;case"DISCONNECT":null==s.onDisconnect||s.onDisconnect(e.payload);break;case"MESSAGE":null==s.onMessage||s.onMessage(e.payload);break;case"SUGGESTIONS":null==s.onSuggestions||s.onSuggestions(e.payload);break;case"SPEAKING_START":null==s.onSpeakingStart||s.onSpeakingStart();break;case"SPEAKING_END":null==s.onSpeakingEnd||s.onSpeakingEnd();break;case"TIMEOUT_WARNING":null==s.onTimeoutWarning||s.onTimeoutWarning();break;case"TIMEOUT":null==s.onTimeout||s.onTimeout();break;case"KEEP_SESSION":null==s.onKeepSession||s.onKeepSession(e.payload);break;case"MUTE_STATUS":null==s.onMuteStatusChange||s.onMuteStatusChange(e.payload);break;case"MIC_STATUS":null==s.onMicrophoneStatusChange||s.onMicrophoneStatusChange(e.payload);break;case"MIC_TRANSCRIPT":null==s.onMicrophoneSpeechRecognitionResult||s.onMicrophoneSpeechRecognitionResult(e.payload);break;case"MIC_ERROR":null==s.onMicrophoneError||s.onMicrophoneError(e.payload);break;case"ERROR":null==s.onError||s.onError(e.payload)}},[s]);return{webViewRef:l,webViewProps:{source:{html:S},onMessage:E,onLoadEnd:g,javaScriptEnabled:!0,mediaPlaybackRequiresUserAction:!1,allowsInlineMediaPlayback:!0,allowsFileAccess:!0,domStorageEnabled:!0,mediaCapturePermissionGrantType:"grant"},initialized:d,startSession:()=>p("START_SESSION"),endSession:()=>p("END_SESSION"),sendMessage:n=>p("SEND_MESSAGE",{text:n}),toggleMicrophone:()=>p("TOGGLE_MIC"),toggleMute:()=>p("TOGGLE_MUTE"),keepSession:()=>p("KEEP_SESSION")}}(n,s,r)}function d(e){let{options:a,style:t,webviewProps:o,webClientUrl:d}=e;const c=l(a,function(n,e){if(null==n)return{};var a={};for(var t in n)if({}.hasOwnProperty.call(n,t)){if(-1!==e.indexOf(t))continue;a[t]=n[t]}return a}(e,r),{webClientUrl:d});/*#__PURE__*/return n.createElement(s,i({ref:c.webViewRef},c.webViewProps,o,{style:t}))}export{d as UnithConversationView,l as useConversation};
//# sourceMappingURL=lib.modern.mjs.map
package/dist/lib.modern.mjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"lib.modern.mjs","sources":["../src/index.tsx"],"sourcesContent":["import React, { useCallback, useMemo, useRef, useState } from \"react\";\nimport { ViewStyle } from \"react-native\";\nimport { WebView, WebViewMessageEvent } from \"react-native-webview\";\n\nexport type ConversationOptions = {\n orgId: string;\n headId: string;\n apiKey: string;\n environment?: string;\n mode?: string;\n language?: string;\n username?: string;\n allowWakeLock?: boolean;\n fadeTransitionsType?: string;\n microphoneProvider?: \"azure\" | \"eleven_labs\" | \"custom\";\n};\n\nexport type ConversationEvents = {\n onStatusChange?: (prop: { status: string }) => void;\n onConnect?: (prop: { userId: string; headInfo: any; microphoneAccess: boolean }) => void;\n onDisconnect?: (prop: any) => void;\n onMessage?: (prop: any) => void;\n onSuggestions?: (prop: { suggestions: string[] }) => void;\n onTimeoutWarning?: () => void;\n onTimeout?: () => void;\n onMuteStatusChange?: (prop: { isMuted: boolean }) => void;\n onSpeakingStart?: () => void;\n onSpeakingEnd?: () => void;\n onKeepSession?: (prop: { granted: boolean }) => void;\n onError?: (prop: { message: string; endConversation: boolean; type: string }) => void;\n onMicrophoneError?: (prop: { message: string }) => void;\n onMicrophoneStatusChange?: (prop: { status: \"ON\" | \"OFF\" | \"PROCESSING\" }) => void;\n onMicrophoneSpeechRecognitionResult?: (prop: { transcript: string }) => void;\n};\n\nexport type BridgeOptions = {\n webClientUrl?: string;\n};\n\nexport type UseConversationResult = {\n webViewRef: React.RefObject<any>;\n webViewProps: Record<string, any>;\n initialized: boolean;\n startSession: () => void;\n endSession: () => void;\n sendMessage: (text: string) => void;\n toggleMicrophone: () => void;\n toggleMute: () => void;\n keepSession: () => void;\n};\n\ntype BridgeMessage = {\n type: string;\n payload?: any;\n};\n\nconst DEFAULT_WEB_CLIENT_URL =\n \"https://unpkg.com/@unith-ai/core-client/dist/lib.module.js\";\n\nfunction buildHtml(webClientUrl: string) {\n return `<!doctype html>\n<html>\n <head>\n <meta charset=\"utf-8\" />\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id=\"root\"></div>\n <script type=\"module\">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => {\n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === \"INIT\") {\n try {\n const { Conversation } = await import(\"${webClientUrl}\");\n\n if (!(\"VideoDecoder\" in window)) {\n send(\"ERROR\", {\n message: \"WebCodecs VideoDecoder is not supported on this device.\",\n type: \"modal\",\n endConversation: true,\n });\n return;\n }\n console.log(\"Initializing conversation with payload:\", payload);\n const element = document.getElementById(\"root\");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send(\"STATUS_CHANGE\", data),\n onConnect: (data) => send(\"CONNECT\", data),\n onDisconnect: (data) => send(\"DISCONNECT\", data),\n onMessage: (data) => send(\"MESSAGE\", data),\n onSuggestions: (data) => send(\"SUGGESTIONS\", data),\n 
onSpeakingStart: () => send(\"SPEAKING_START\"),\n onSpeakingEnd: () => send(\"SPEAKING_END\"),\n onTimeoutWarning: () => send(\"TIMEOUT_WARNING\"),\n onTimeout: () => send(\"TIMEOUT\"),\n onKeepSession: (data) => send(\"KEEP_SESSION\", data),\n onMuteStatusChange: (data) => send(\"MUTE_STATUS\", data),\n onError: (data) => send(\"ERROR\", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send(\"MIC_ERROR\", data),\n onMicrophoneStatusChange: (data) => send(\"MIC_STATUS\", data),\n onMicrophoneSpeechRecognitionResult: (data) => send(\"MIC_TRANSCRIPT\", data),\n },\n });\n\n ready = true;\n send(\"READY\");\n\n while (queue.length > 0) {\n const next = queue.shift();\n try {\n await applyMessage(next);\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to process queued message\",\n type: \"notification\",\n endConversation: false,\n });\n }\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to initialize conversation\",\n type: \"modal\",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n try {\n switch (type) {\n case \"START_SESSION\":\n await conversation.startSession();\n break;\n case \"END_SESSION\":\n await conversation.endSession();\n break;\n case \"SEND_MESSAGE\":\n await conversation.sendMessage(payload?.text || \"\");\n break;\n case \"TOGGLE_MIC\":\n await conversation.toggleMicrophone();\n break;\n case \"TOGGLE_MUTE\":\n await conversation.toggleMute();\n break;\n case \"KEEP_SESSION\":\n await conversation.keepSession();\n break;\n default:\n break;\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Operation failed\",\n type: \"notification\",\n endConversation: false,\n });\n }\n };\n\n const handleIncoming = (event) => {\n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== \"INIT\") {\n queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (error) {\n console.error(\"Failed to handle incoming message:\", error);\n }\n };\n\n window.addEventListener(\"message\", handleIncoming);\n document.addEventListener(\"message\", handleIncoming);\n </script>\n </body>\n</html>`;\n}\n\nfunction useBridge(\n options: ConversationOptions,\n events: ConversationEvents = {},\n bridge: BridgeOptions = {}\n): UseConversationResult {\n const webViewRef = useRef<any>(null);\n const [initialized, setInitialized] = useState(false);\n\n const html = useMemo(\n () => buildHtml(bridge.webClientUrl || DEFAULT_WEB_CLIENT_URL),\n [bridge.webClientUrl]\n );\n\n const post = useCallback((type: string, payload?: any) => {\n const message: BridgeMessage = { type, payload };\n webViewRef.current?.postMessage(JSON.stringify(message));\n }, []);\n\n const init = useCallback(() => {\n if (initialized) return;\n console.log(\"Initializing conversation bridge with options:\", options);\n post(\"INIT\", {\n ...options,\n });\n setInitialized(true);\n }, [initialized, options, post]);\n\n const onMessage = useCallback(\n (event: WebViewMessageEvent) => {\n let data: BridgeMessage | null = null;\n try {\n data = JSON.parse(event.nativeEvent.data);\n } catch (_) {\n return;\n }\n if (!data) return;\n\n switch (data.type) {\n case \"STATUS_CHANGE\":\n events.onStatusChange?.(data.payload);\n break;\n case \"CONNECT\":\n events.onConnect?.(data.payload);\n break;\n case \"DISCONNECT\":\n events.onDisconnect?.(data.payload);\n break;\n case \"MESSAGE\":\n events.onMessage?.(data.payload);\n break;\n case \"SUGGESTIONS\":\n 
events.onSuggestions?.(data.payload);\n break;\n case \"SPEAKING_START\":\n events.onSpeakingStart?.();\n break;\n case \"SPEAKING_END\":\n events.onSpeakingEnd?.();\n break;\n case \"TIMEOUT_WARNING\":\n events.onTimeoutWarning?.();\n break;\n case \"TIMEOUT\":\n events.onTimeout?.();\n break;\n case \"KEEP_SESSION\":\n events.onKeepSession?.(data.payload);\n break;\n case \"MUTE_STATUS\":\n events.onMuteStatusChange?.(data.payload);\n break;\n case \"MIC_STATUS\":\n events.onMicrophoneStatusChange?.(data.payload);\n break;\n case \"MIC_TRANSCRIPT\":\n events.onMicrophoneSpeechRecognitionResult?.(data.payload);\n break;\n case \"MIC_ERROR\":\n events.onMicrophoneError?.(data.payload);\n break;\n case \"ERROR\":\n events.onError?.(data.payload);\n break;\n default:\n break;\n }\n },\n [events]\n );\n\n return {\n webViewRef,\n webViewProps: {\n source: { html },\n onMessage,\n onLoadEnd: () => {\n console.log(\"WebView loaded, initializing bridge...\");\n init();\n },\n javaScriptEnabled: true,\n mediaPlaybackRequiresUserAction: false,\n allowsInlineMediaPlayback: true,\n allowsFileAccess: true,\n domStorageEnabled: true,\n mediaCapturePermissionGrantType: 'grant',\n },\n initialized,\n startSession: () => post(\"START_SESSION\"),\n endSession: () => post(\"END_SESSION\"),\n sendMessage: (text: string) => post(\"SEND_MESSAGE\", { text }),\n toggleMicrophone: () => post(\"TOGGLE_MIC\"),\n toggleMute: () => post(\"TOGGLE_MUTE\"),\n keepSession: () => post(\"KEEP_SESSION\"),\n };\n}\n\nexport function useConversation(\n options: ConversationOptions,\n events?: ConversationEvents,\n bridge?: BridgeOptions\n): UseConversationResult {\n return useBridge(options, events, bridge);\n}\n\nexport type UnithConversationViewProps = ConversationEvents & {\n options: ConversationOptions;\n style?: ViewStyle;\n webviewProps?: Record<string, any>;\n webClientUrl?: string;\n};\n\nexport function UnithConversationView({\n options,\n style,\n webviewProps,\n webClientUrl,\n ...events\n}: UnithConversationViewProps) {\n const convo = useConversation(options, events, { webClientUrl });\n\n return (\n <WebView\n ref={convo.webViewRef}\n {...convo.webViewProps}\n {...webviewProps}\n style={style}\n />\n 
);\n}"],"names":["_excluded","useConversation","options","events","bridge","webViewRef","useRef","initialized","setInitialized","useState","html","useMemo","webClientUrl","post","useCallback","type","payload","_webViewRef$current","current","postMessage","JSON","stringify","init","console","log","_extends","onMessage","event","data","parse","nativeEvent","_","onStatusChange","onConnect","onDisconnect","onSuggestions","onSpeakingStart","onSpeakingEnd","onTimeoutWarning","onTimeout","onKeepSession","onMuteStatusChange","onMicrophoneStatusChange","onMicrophoneSpeechRecognitionResult","onMicrophoneError","onError","webViewProps","source","onLoadEnd","javaScriptEnabled","mediaPlaybackRequiresUserAction","allowsInlineMediaPlayback","allowsFileAccess","domStorageEnabled","mediaCapturePermissionGrantType","startSession","endSession","sendMessage","text","toggleMicrophone","toggleMute","keepSession","useBridge","UnithConversationView","_ref","style","webviewProps","convo","_objectWithoutPropertiesLoose","React","WebView","ref"],"mappings":"qVAAA,MAAAA,EAAA,CAAA,UAAA,QAAA,eAAA,gBA8TgB,SAAAC,EACdC,EACAC,EACAC,GAEA,OAzHF,SACEF,EACAC,EAA6B,CAAA,EAC7BC,EAAwB,IAExB,MAAMC,EAAaC,EAAY,OACxBC,EAAaC,GAAkBC,GAAS,GAEzCC,EAAOC,EACX,IAvJK,q5BAuJWP,EAAOQ,cA1JzB,i5HA2JE,CAACR,EAAOQ,eAGJC,EAAOC,EAAY,CAACC,EAAcC,KAAiBC,IAAAA,EAErC,OAAlBA,EAAAZ,EAAWa,UAAXD,EAAoBE,YAAYC,KAAKC,UADN,CAAEN,OAAMC,cAEtC,IAEGM,EAAOR,EAAY,KACnBP,IACJgB,QAAQC,IAAI,iDAAkDtB,GAC9DW,EAAK,OAAMY,EACNvB,GAAAA,IAELM,GAAe,KACd,CAACD,EAAaL,EAASW,IAEpBa,EAAYZ,EACfa,IACC,IAAIC,EAA6B,KACjC,IACEA,EAAOR,KAAKS,MAAMF,EAAMG,YAAYF,KACtC,CAAE,MAAOG,GACP,MACF,CACA,GAAKH,EAEL,OAAQA,EAAKb,MACX,IAAK,sBACHZ,EAAO6B,gBAAP7B,EAAO6B,eAAiBJ,EAAKZ,SAC7B,MACF,IAAK,UACHb,MAAAA,EAAO8B,WAAP9B,EAAO8B,UAAYL,EAAKZ,SACxB,MACF,IAAK,mBACHb,EAAO+B,cAAP/B,EAAO+B,aAAeN,EAAKZ,SAC3B,MACF,IAAK,UACa,MAAhBb,EAAOuB,WAAPvB,EAAOuB,UAAYE,EAAKZ,SACxB,MACF,IAAK,oBACHb,EAAOgC,eAAPhC,EAAOgC,cAAgBP,EAAKZ,SAC5B,MACF,IAAK,iBACHb,MAAAA,EAAOiC,iBAAPjC,EAAOiC,kBACP,MACF,IAAK,eACiB,MAApBjC,EAAOkC,eAAPlC,EAAOkC,gBACP,MACF,IAAK,kBACHlC,MAAAA,EAAOmC,kBAAPnC,EAAOmC,mBACP,MACF,IAAK,UACHnC,MAAAA,EAAOoC,WAAPpC,EAAOoC,YACP,MACF,IAAK,qBACHpC,EAAOqC,eAAPrC,EAAOqC,cAAgBZ,EAAKZ,SAC5B,MACF,IAAK,cACsB,MAAzBb,EAAOsC,oBAAPtC,EAAOsC,mBAAqBb,EAAKZ,SACjC,MACF,IAAK,aACHb,MAAAA,EAAOuC,0BAAPvC,EAAOuC,yBAA2Bd,EAAKZ,SACvC,MACF,IAAK,iBACuC,MAA1Cb,EAAOwC,qCAAPxC,EAAOwC,oCAAsCf,EAAKZ,SAClD,MACF,IAAK,kBACHb,EAAOyC,mBAAPzC,EAAOyC,kBAAoBhB,EAAKZ,SAChC,MACF,IAAK,QACHb,MAAAA,EAAO0C,SAAP1C,EAAO0C,QAAUjB,EAAKZ,WAM5B,CAACb,IAGH,MAAO,CACLE,aACAyC,aAAc,CACZC,OAAQ,CAAErC,QACVgB,YACAsB,UAAWA,KACTzB,QAAQC,IAAI,0CACZF,KAEF2B,mBAAmB,EACnBC,iCAAiC,EACjCC,2BAA2B,EAC3BC,kBAAkB,EAClBC,mBAAmB,EACnBC,gCAAiC,SAEnC/C,cACAgD,aAAcA,IAAM1C,EAAK,iBACzB2C,WAAYA,IAAM3C,EAAK,eACvB4C,YAAcC,GAAiB7C,EAAK,eAAgB,CAAE6C,SACtDC,iBAAkBA,IAAM9C,EAAK,cAC7B+C,WAAYA,IAAM/C,EAAK,eACvBgD,YAAaA,IAAMhD,EAAK,gBAE5B,CAOSiD,CAAU5D,EAASC,EAAQC,EACpC,UASgB2D,EAAqBC,GAAC,IAAA9D,QACpCA,EAAO+D,MACPA,EAAKC,aACLA,EAAYtD,aACZA,GAE2BoD,EAC3B,MAAMG,EAAQlE,EAAgBC,6IAFrBkE,CAAAJ,EAAAhE,GAEsC,CAAEY,8BAEjD,OACEyD,gBAACC,EAAO7C,GACN8C,IAAKJ,EAAM9D,YACP8D,EAAMrB,aACNoB,EAAY,CAChBD,MAAOA,IAGb"}
+
{"version":3,"file":"lib.modern.mjs","sources":["../src/index.tsx"],"sourcesContent":["import React, { useCallback, useMemo, useRef, useState } from \"react\";\nimport { ViewStyle } from \"react-native\";\nimport { WebView, WebViewMessageEvent } from \"react-native-webview\";\n\nexport type ConversationOptions = {\n orgId: string;\n headId: string;\n apiKey: string;\n environment?: string;\n mode?: string;\n language?: string;\n username?: string;\n allowWakeLock?: boolean;\n fadeTransitionsType?: string;\n microphoneProvider?: \"azure\" | \"eleven_labs\" | \"custom\";\n};\n\nexport type ConversationEvents = {\n onStatusChange?: (prop: { status: string }) => void;\n onConnect?: (prop: { userId: string; headInfo: any; microphoneAccess: boolean }) => void;\n onDisconnect?: (prop: any) => void;\n onMessage?: (prop: any) => void;\n onSuggestions?: (prop: { suggestions: string[] }) => void;\n onTimeoutWarning?: () => void;\n onTimeout?: () => void;\n onMuteStatusChange?: (prop: { isMuted: boolean }) => void;\n onSpeakingStart?: () => void;\n onSpeakingEnd?: () => void;\n onKeepSession?: (prop: { granted: boolean }) => void;\n onError?: (prop: { message: string; endConversation: boolean; type: string }) => void;\n onMicrophoneError?: (prop: { message: string }) => void;\n onMicrophoneStatusChange?: (prop: { status: \"ON\" | \"OFF\" | \"PROCESSING\" }) => void;\n onMicrophoneSpeechRecognitionResult?: (prop: { transcript: string }) => void;\n};\n\nexport type BridgeOptions = {\n webClientUrl?: string;\n};\n\nexport type UseConversationResult = {\n webViewRef: React.RefObject<any>;\n webViewProps: Record<string, any>;\n initialized: boolean;\n startSession: () => void;\n endSession: () => void;\n sendMessage: (text: string) => void;\n toggleMicrophone: () => void;\n toggleMute: () => void;\n keepSession: () => void;\n};\n\ntype BridgeMessage = {\n type: string;\n payload?: any;\n};\n\nconst DEFAULT_WEB_CLIENT_URL =\n \"https://unpkg.com/@unith-ai/core-client/dist/lib.module.js\";\n\nfunction buildHtml(webClientUrl: string) {\n return `<!doctype html>\n<html>\n <head>\n <meta charset=\"utf-8\" />\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id=\"root\"></div>\n <script type=\"module\">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => {\n console.log(\"Applying message from React Native:\", msg);\n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === \"INIT\") {\n try {\n const { Conversation } = await import(\"${webClientUrl}\");\n\n if (!(\"VideoDecoder\" in window)) {\n send(\"ERROR\", {\n message: \"WebCodecs VideoDecoder is not supported on this device.\",\n type: \"modal\",\n endConversation: true,\n });\n return;\n }\n\n const element = document.getElementById(\"root\");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send(\"STATUS_CHANGE\", data),\n onConnect: (data) => send(\"CONNECT\", data),\n onDisconnect: (data) => send(\"DISCONNECT\", data),\n onMessage: (data) => send(\"MESSAGE\", data),\n onSuggestions: (data) => send(\"SUGGESTIONS\", data),\n 
onSpeakingStart: () => send(\"SPEAKING_START\"),\n onSpeakingEnd: () => send(\"SPEAKING_END\"),\n onTimeoutWarning: () => send(\"TIMEOUT_WARNING\"),\n onTimeout: () => send(\"TIMEOUT\"),\n onKeepSession: (data) => send(\"KEEP_SESSION\", data),\n onMuteStatusChange: (data) => send(\"MUTE_STATUS\", data),\n onError: (data) => send(\"ERROR\", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send(\"MIC_ERROR\", data),\n onMicrophoneStatusChange: (data) => send(\"MIC_STATUS\", data),\n onMicrophoneSpeechRecognitionResult: (data) => send(\"MIC_TRANSCRIPT\", data),\n },\n });\n\n ready = true;\n send(\"READY\");\n\n while (queue.length > 0) {\n const next = queue.shift();\n try {\n await applyMessage(next);\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to process queued message\",\n type: \"notification\",\n endConversation: false,\n });\n }\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to initialize conversation\",\n type: \"modal\",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n try {\n switch (type) {\n case \"START_SESSION\":\n await conversation.startSession();\n break;\n case \"END_SESSION\":\n await conversation.endSession();\n break;\n case \"SEND_MESSAGE\":\n await conversation.sendMessage(payload?.text || \"\");\n break;\n case \"TOGGLE_MIC\":\n await conversation.toggleMicrophone();\n break;\n case \"TOGGLE_MUTE\":\n await conversation.toggleMute();\n break;\n case \"KEEP_SESSION\":\n await conversation.keepSession();\n break;\n default:\n break;\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Operation failed\",\n type: \"notification\",\n endConversation: false,\n });\n }\n };\n\n const handleIncoming = (event) => {\n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== \"INIT\") {\n queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (error) {\n console.error(\"Failed to handle incoming message:\", error);\n }\n };\n\n window.addEventListener(\"message\", handleIncoming);\n document.addEventListener(\"message\", handleIncoming);\n </script>\n </body>\n</html>`;\n}\n\nfunction useBridge(\n options: ConversationOptions,\n events: ConversationEvents = {},\n bridge: BridgeOptions = {}\n): UseConversationResult {\n const webViewRef = useRef<any>(null);\n const [initialized, setInitialized] = useState(false);\n\n const html = useMemo(\n () => buildHtml(bridge.webClientUrl || DEFAULT_WEB_CLIENT_URL),\n [bridge.webClientUrl]\n );\n\n const post = useCallback((type: string, payload?: any) => {\n const message: BridgeMessage = { type, payload };\n webViewRef.current?.postMessage(JSON.stringify(message));\n }, []);\n\n const init = useCallback(() => {\n if (initialized) return;\n console.log(\"Initializing conversation bridge with options:\", options);\n post(\"INIT\", {\n ...options,\n });\n setInitialized(true);\n }, [initialized, options, post]);\n\n const handleLoadEnd = useCallback(() => {\n console.log(\"WebView loaded, initializing bridge...\");\n init();\n }, [init]);\n\n const onMessage = useCallback(\n (event: WebViewMessageEvent) => {\n let data: BridgeMessage | null = null;\n try {\n data = JSON.parse(event.nativeEvent.data);\n } catch (_) {\n return;\n }\n if (!data) return;\n\n switch (data.type) {\n case \"STATUS_CHANGE\":\n events.onStatusChange?.(data.payload);\n break;\n case \"CONNECT\":\n events.onConnect?.(data.payload);\n break;\n case \"DISCONNECT\":\n events.onDisconnect?.(data.payload);\n 
break;\n case \"MESSAGE\":\n events.onMessage?.(data.payload);\n break;\n case \"SUGGESTIONS\":\n events.onSuggestions?.(data.payload);\n break;\n case \"SPEAKING_START\":\n events.onSpeakingStart?.();\n break;\n case \"SPEAKING_END\":\n events.onSpeakingEnd?.();\n break;\n case \"TIMEOUT_WARNING\":\n events.onTimeoutWarning?.();\n break;\n case \"TIMEOUT\":\n events.onTimeout?.();\n break;\n case \"KEEP_SESSION\":\n events.onKeepSession?.(data.payload);\n break;\n case \"MUTE_STATUS\":\n events.onMuteStatusChange?.(data.payload);\n break;\n case \"MIC_STATUS\":\n events.onMicrophoneStatusChange?.(data.payload);\n break;\n case \"MIC_TRANSCRIPT\":\n events.onMicrophoneSpeechRecognitionResult?.(data.payload);\n break;\n case \"MIC_ERROR\":\n events.onMicrophoneError?.(data.payload);\n break;\n case \"ERROR\":\n events.onError?.(data.payload);\n break;\n default:\n break;\n }\n },\n [events]\n );\n\n return {\n webViewRef,\n webViewProps: {\n source: { html },\n onMessage,\n onLoadEnd: handleLoadEnd,\n javaScriptEnabled: true,\n mediaPlaybackRequiresUserAction: false,\n allowsInlineMediaPlayback: true,\n allowsFileAccess: true,\n domStorageEnabled: true,\n mediaCapturePermissionGrantType: 'grant',\n },\n initialized,\n startSession: () => post(\"START_SESSION\"),\n endSession: () => post(\"END_SESSION\"),\n sendMessage: (text: string) => post(\"SEND_MESSAGE\", { text }),\n toggleMicrophone: () => post(\"TOGGLE_MIC\"),\n toggleMute: () => post(\"TOGGLE_MUTE\"),\n keepSession: () => post(\"KEEP_SESSION\"),\n };\n}\n\nexport function useConversation(\n options: ConversationOptions,\n events?: ConversationEvents,\n bridge?: BridgeOptions\n): UseConversationResult {\n return useBridge(options, events, bridge);\n}\n\nexport type UnithConversationViewProps = ConversationEvents & {\n options: ConversationOptions;\n style?: ViewStyle;\n webviewProps?: Record<string, any>;\n webClientUrl?: string;\n};\n\nexport function UnithConversationView({\n options,\n style,\n webviewProps,\n webClientUrl,\n ...events\n}: UnithConversationViewProps) {\n const convo = useConversation(options, events, { webClientUrl });\n\n return (\n <WebView\n ref={convo.webViewRef}\n {...convo.webViewProps}\n {...webviewProps}\n style={style}\n />\n 
);\n}"],"names":["_excluded","useConversation","options","events","bridge","webViewRef","useRef","initialized","setInitialized","useState","html","useMemo","webClientUrl","post","useCallback","type","payload","_webViewRef$current","current","postMessage","JSON","stringify","init","console","log","_extends","handleLoadEnd","onMessage","event","data","parse","nativeEvent","_","onStatusChange","onConnect","onDisconnect","onSuggestions","onSpeakingStart","onSpeakingEnd","onTimeoutWarning","onTimeout","onKeepSession","onMuteStatusChange","onMicrophoneStatusChange","onMicrophoneSpeechRecognitionResult","onMicrophoneError","onError","webViewProps","source","onLoadEnd","javaScriptEnabled","mediaPlaybackRequiresUserAction","allowsInlineMediaPlayback","allowsFileAccess","domStorageEnabled","mediaCapturePermissionGrantType","startSession","endSession","sendMessage","text","toggleMicrophone","toggleMute","keepSession","useBridge","UnithConversationView","_ref","style","webviewProps","convo","_objectWithoutPropertiesLoose","React","WebView","ref"],"mappings":"qVAAA,MAAAA,EAAA,CAAA,UAAA,QAAA,eAAA,yBAiUgBC,EACdC,EACAC,EACAC,GAEA,OA3HF,SACEF,EACAC,EAA6B,CAAA,EAC7BC,EAAwB,IAExB,MAAMC,EAAaC,EAAY,OACxBC,EAAaC,GAAkBC,GAAS,GAEzCC,EAAOC,EACX,IAxJK,u9BAwJWP,EAAOQ,cA3JzB,q0HA4JE,CAACR,EAAOQ,eAGJC,EAAOC,EAAY,CAACC,EAAcC,SAAiBC,EAEvDA,OAAAA,EAAAZ,EAAWa,UAAXD,EAAoBE,YAAYC,KAAKC,UADN,CAAEN,OAAMC,cAEtC,IAEGM,EAAOR,EAAY,KACnBP,IACJgB,QAAQC,IAAI,iDAAkDtB,GAC9DW,EAAK,OAAMY,EAAA,GACNvB,IAELM,GAAe,KACd,CAACD,EAAaL,EAASW,IAEpBa,EAAgBZ,EAAY,KAChCS,QAAQC,IAAI,0CACZF,KACC,CAACA,IAEEK,EAAYb,EACfc,IACC,IAAIC,EAA6B,KACjC,IACEA,EAAOT,KAAKU,MAAMF,EAAMG,YAAYF,KACtC,CAAE,MAAOG,GACP,MACF,CACA,GAAKH,EAEL,OAAQA,EAAKd,MACX,IAAK,sBACHZ,EAAO8B,gBAAP9B,EAAO8B,eAAiBJ,EAAKb,SAC7B,MACF,IAAK,gBACHb,EAAO+B,WAAP/B,EAAO+B,UAAYL,EAAKb,SACxB,MACF,IAAK,aACgB,MAAnBb,EAAOgC,cAAPhC,EAAOgC,aAAeN,EAAKb,SAC3B,MACF,IAAK,UACHb,MAAAA,EAAOwB,WAAPxB,EAAOwB,UAAYE,EAAKb,SACxB,MACF,IAAK,cACiB,MAApBb,EAAOiC,eAAPjC,EAAOiC,cAAgBP,EAAKb,SAC5B,MACF,IAAK,uBACHb,EAAOkC,iBAAPlC,EAAOkC,kBACP,MACF,IAAK,qBACHlC,EAAOmC,eAAPnC,EAAOmC,gBACP,MACF,IAAK,kBACHnC,MAAAA,EAAOoC,kBAAPpC,EAAOoC,mBACP,MACF,IAAK,UACa,MAAhBpC,EAAOqC,WAAPrC,EAAOqC,YACP,MACF,IAAK,qBACHrC,EAAOsC,eAAPtC,EAAOsC,cAAgBZ,EAAKb,SAC5B,MACF,IAAK,cACsB,MAAzBb,EAAOuC,oBAAPvC,EAAOuC,mBAAqBb,EAAKb,SACjC,MACF,IAAK,aAC4B,MAA/Bb,EAAOwC,0BAAPxC,EAAOwC,yBAA2Bd,EAAKb,SACvC,MACF,IAAK,uBACHb,EAAOyC,qCAAPzC,EAAOyC,oCAAsCf,EAAKb,SAClD,MACF,IAAK,YACHb,MAAAA,EAAO0C,mBAAP1C,EAAO0C,kBAAoBhB,EAAKb,SAChC,MACF,IAAK,cACHb,EAAO2C,SAAP3C,EAAO2C,QAAUjB,EAAKb,WAM5B,CAACb,IAGH,MAAO,CACLE,aACA0C,aAAc,CACZC,OAAQ,CAAEtC,QACViB,YACAsB,UAAWvB,EACXwB,mBAAmB,EACnBC,iCAAiC,EACjCC,2BAA2B,EAC3BC,kBAAkB,EAClBC,mBAAmB,EACnBC,gCAAiC,SAEnChD,cACAiD,aAAcA,IAAM3C,EAAK,iBACzB4C,WAAYA,IAAM5C,EAAK,eACvB6C,YAAcC,GAAiB9C,EAAK,eAAgB,CAAE8C,SACtDC,iBAAkBA,IAAM/C,EAAK,cAC7BgD,WAAYA,IAAMhD,EAAK,eACvBiD,YAAaA,IAAMjD,EAAK,gBAE5B,CAOSkD,CAAU7D,EAASC,EAAQC,EACpC,CASgB,SAAA4D,EAAqBC,GAMR,IANS/D,QACpCA,EAAOgE,MACPA,EAAKC,aACLA,EAAYvD,aACZA,GAE2BqD,EAC3B,MAAMG,EAAQnE,EAAgBC,6IAFrBmE,CAAAJ,EAAAjE,GAEsC,CAAEY,8BAEjD,OACE0D,gBAACC,EAAO9C,EACN+C,CAAAA,IAAKJ,EAAM/D,YACP+D,EAAMrB,aACNoB,EAAY,CAChBD,MAAOA,IAGb"}
package/dist/lib.module.js
CHANGED
@@ -1,2 +1,2 @@
-
import n,{useRef as e,useState as a,useMemo as t,useCallback as o}from"react";import{WebView as i}from"react-native-webview";function r(){return r=Object.assign?Object.assign.bind():function(n){for(var e=1;e<arguments.length;e++){var a=arguments[e];for(var t in a)({}).hasOwnProperty.call(a,t)&&(n[t]=a[t])}return n},r.apply(null,arguments)}var s=["options","style","webviewProps","webClientUrl"];function l(n,i,s){return function(n,i,s){void 0===i&&(i={}),void 0===s&&(s={});var l=e(null),d=a(!1),c=d[0],u=d[1],S=t(function(){return'<!doctype html>\n<html>\n <head>\n <meta charset="utf-8" />\n <meta name="viewport" content="width=device-width, initial-scale=1" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id="root"></div>\n <script type="module">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => {\n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === "INIT") {\n try {\n const { Conversation } = await import("'+(s.webClientUrl||"https://unpkg.com/@unith-ai/core-client/dist/lib.module.js")+'");\n\n if (!("VideoDecoder" in window)) {\n send("ERROR", {\n message: "WebCodecs VideoDecoder is not supported on this device.",\n type: "modal",\n endConversation: true,\n });\n return;\n }\n
+
import n,{useRef as e,useState as a,useMemo as t,useCallback as o}from"react";import{WebView as i}from"react-native-webview";function r(){return r=Object.assign?Object.assign.bind():function(n){for(var e=1;e<arguments.length;e++){var a=arguments[e];for(var t in a)({}).hasOwnProperty.call(a,t)&&(n[t]=a[t])}return n},r.apply(null,arguments)}var s=["options","style","webviewProps","webClientUrl"];function l(n,i,s){return function(n,i,s){void 0===i&&(i={}),void 0===s&&(s={});var l=e(null),d=a(!1),c=d[0],u=d[1],S=t(function(){return'<!doctype html>\n<html>\n <head>\n <meta charset="utf-8" />\n <meta name="viewport" content="width=device-width, initial-scale=1" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id="root"></div>\n <script type="module">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => {\n console.log("Applying message from React Native:", msg);\n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === "INIT") {\n try {\n const { Conversation } = await import("'+(s.webClientUrl||"https://unpkg.com/@unith-ai/core-client/dist/lib.module.js")+'");\n\n if (!("VideoDecoder" in window)) {\n send("ERROR", {\n message: "WebCodecs VideoDecoder is not supported on this device.",\n type: "modal",\n endConversation: true,\n });\n return;\n }\n\n const element = document.getElementById("root");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send("STATUS_CHANGE", data),\n onConnect: (data) => send("CONNECT", data),\n onDisconnect: (data) => send("DISCONNECT", data),\n onMessage: (data) => send("MESSAGE", data),\n onSuggestions: (data) => send("SUGGESTIONS", data),\n onSpeakingStart: () => send("SPEAKING_START"),\n onSpeakingEnd: () => send("SPEAKING_END"),\n onTimeoutWarning: () => send("TIMEOUT_WARNING"),\n onTimeout: () => send("TIMEOUT"),\n onKeepSession: (data) => send("KEEP_SESSION", data),\n onMuteStatusChange: (data) => send("MUTE_STATUS", data),\n onError: (data) => send("ERROR", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send("MIC_ERROR", data),\n onMicrophoneStatusChange: (data) => send("MIC_STATUS", data),\n onMicrophoneSpeechRecognitionResult: (data) => send("MIC_TRANSCRIPT", data),\n },\n });\n\n ready = true;\n send("READY");\n\n while (queue.length > 0) {\n const next = queue.shift();\n try {\n await applyMessage(next);\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Failed to process queued message",\n type: "notification",\n endConversation: false,\n });\n }\n }\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Failed to initialize conversation",\n type: "modal",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n try {\n switch (type) {\n case "START_SESSION":\n await conversation.startSession();\n break;\n case "END_SESSION":\n await conversation.endSession();\n break;\n case "SEND_MESSAGE":\n await conversation.sendMessage(payload?.text || "");\n break;\n case "TOGGLE_MIC":\n await conversation.toggleMicrophone();\n break;\n case "TOGGLE_MUTE":\n await conversation.toggleMute();\n break;\n case "KEEP_SESSION":\n await 
conversation.keepSession();\n break;\n default:\n break;\n }\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Operation failed",\n type: "notification",\n endConversation: false,\n });\n }\n };\n\n const handleIncoming = (event) => {\n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== "INIT") {\n queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (error) {\n console.error("Failed to handle incoming message:", error);\n }\n };\n\n window.addEventListener("message", handleIncoming);\n document.addEventListener("message", handleIncoming);\n <\/script>\n </body>\n</html>'},[s.webClientUrl]),p=o(function(n,e){var a;null==(a=l.current)||a.postMessage(JSON.stringify({type:n,payload:e}))},[]),g=o(function(){c||(console.log("Initializing conversation bridge with options:",n),p("INIT",r({},n)),u(!0))},[c,n,p]),E=o(function(){console.log("WebView loaded, initializing bridge..."),g()},[g]),h=o(function(n){var e=null;try{e=JSON.parse(n.nativeEvent.data)}catch(n){return}if(e)switch(e.type){case"STATUS_CHANGE":null==i.onStatusChange||i.onStatusChange(e.payload);break;case"CONNECT":null==i.onConnect||i.onConnect(e.payload);break;case"DISCONNECT":null==i.onDisconnect||i.onDisconnect(e.payload);break;case"MESSAGE":null==i.onMessage||i.onMessage(e.payload);break;case"SUGGESTIONS":null==i.onSuggestions||i.onSuggestions(e.payload);break;case"SPEAKING_START":null==i.onSpeakingStart||i.onSpeakingStart();break;case"SPEAKING_END":null==i.onSpeakingEnd||i.onSpeakingEnd();break;case"TIMEOUT_WARNING":null==i.onTimeoutWarning||i.onTimeoutWarning();break;case"TIMEOUT":null==i.onTimeout||i.onTimeout();break;case"KEEP_SESSION":null==i.onKeepSession||i.onKeepSession(e.payload);break;case"MUTE_STATUS":null==i.onMuteStatusChange||i.onMuteStatusChange(e.payload);break;case"MIC_STATUS":null==i.onMicrophoneStatusChange||i.onMicrophoneStatusChange(e.payload);break;case"MIC_TRANSCRIPT":null==i.onMicrophoneSpeechRecognitionResult||i.onMicrophoneSpeechRecognitionResult(e.payload);break;case"MIC_ERROR":null==i.onMicrophoneError||i.onMicrophoneError(e.payload);break;case"ERROR":null==i.onError||i.onError(e.payload)}},[i]);return{webViewRef:l,webViewProps:{source:{html:S},onMessage:h,onLoadEnd:E,javaScriptEnabled:!0,mediaPlaybackRequiresUserAction:!1,allowsInlineMediaPlayback:!0,allowsFileAccess:!0,domStorageEnabled:!0,mediaCapturePermissionGrantType:"grant"},initialized:c,startSession:function(){return p("START_SESSION")},endSession:function(){return p("END_SESSION")},sendMessage:function(n){return p("SEND_MESSAGE",{text:n})},toggleMicrophone:function(){return p("TOGGLE_MIC")},toggleMute:function(){return p("TOGGLE_MUTE")},keepSession:function(){return p("KEEP_SESSION")}}}(n,i,s)}function d(e){var a=e.style,t=e.webviewProps,o=e.webClientUrl,d=l(e.options,function(n,e){if(null==n)return{};var a={};for(var t in n)if({}.hasOwnProperty.call(n,t)){if(-1!==e.indexOf(t))continue;a[t]=n[t]}return a}(e,s),{webClientUrl:o});/*#__PURE__*/return n.createElement(i,r({ref:d.webViewRef},d.webViewProps,t,{style:a}))}export{d as UnithConversationView,l as useConversation};
//# sourceMappingURL=lib.module.js.map
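The rebuilt `lib.module.js` keeps the same public surface: `useConversation` (the internal `useBridge` re-exported) and `UnithConversationView`. For orientation, a minimal consumer sketch based on the types visible in the embedded source; the org/head/key values are placeholders, not working credentials:

import React from "react";
import { UnithConversationView } from "@unith-ai/react-native";

export function DigitalHumanScreen() {
  return (
    <UnithConversationView
      // ConversationOptions requires orgId, headId and apiKey; these values are fake.
      options={{ orgId: "org_example", headId: "head_example", apiKey: "key_example" }}
      onConnect={({ userId, microphoneAccess }) =>
        console.log("connected as", userId, "mic access:", microphoneAccess)
      }
      onError={({ message, endConversation }) =>
        console.warn("bridge error:", message, "fatal:", endConversation)
      }
      style={{ flex: 1 }}
    />
  );
}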
package/dist/lib.module.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"lib.module.js","sources":["../src/index.tsx"],"sourcesContent":["import React, { useCallback, useMemo, useRef, useState } from \"react\";\nimport { ViewStyle } from \"react-native\";\nimport { WebView, WebViewMessageEvent } from \"react-native-webview\";\n\nexport type ConversationOptions = {\n orgId: string;\n headId: string;\n apiKey: string;\n environment?: string;\n mode?: string;\n language?: string;\n username?: string;\n allowWakeLock?: boolean;\n fadeTransitionsType?: string;\n microphoneProvider?: \"azure\" | \"eleven_labs\" | \"custom\";\n};\n\nexport type ConversationEvents = {\n onStatusChange?: (prop: { status: string }) => void;\n onConnect?: (prop: { userId: string; headInfo: any; microphoneAccess: boolean }) => void;\n onDisconnect?: (prop: any) => void;\n onMessage?: (prop: any) => void;\n onSuggestions?: (prop: { suggestions: string[] }) => void;\n onTimeoutWarning?: () => void;\n onTimeout?: () => void;\n onMuteStatusChange?: (prop: { isMuted: boolean }) => void;\n onSpeakingStart?: () => void;\n onSpeakingEnd?: () => void;\n onKeepSession?: (prop: { granted: boolean }) => void;\n onError?: (prop: { message: string; endConversation: boolean; type: string }) => void;\n onMicrophoneError?: (prop: { message: string }) => void;\n onMicrophoneStatusChange?: (prop: { status: \"ON\" | \"OFF\" | \"PROCESSING\" }) => void;\n onMicrophoneSpeechRecognitionResult?: (prop: { transcript: string }) => void;\n};\n\nexport type BridgeOptions = {\n webClientUrl?: string;\n};\n\nexport type UseConversationResult = {\n webViewRef: React.RefObject<any>;\n webViewProps: Record<string, any>;\n initialized: boolean;\n startSession: () => void;\n endSession: () => void;\n sendMessage: (text: string) => void;\n toggleMicrophone: () => void;\n toggleMute: () => void;\n keepSession: () => void;\n};\n\ntype BridgeMessage = {\n type: string;\n payload?: any;\n};\n\nconst DEFAULT_WEB_CLIENT_URL =\n \"https://unpkg.com/@unith-ai/core-client/dist/lib.module.js\";\n\nfunction buildHtml(webClientUrl: string) {\n return `<!doctype html>\n<html>\n <head>\n <meta charset=\"utf-8\" />\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id=\"root\"></div>\n <script type=\"module\">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => {\n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === \"INIT\") {\n try {\n const { Conversation } = await import(\"${webClientUrl}\");\n\n if (!(\"VideoDecoder\" in window)) {\n send(\"ERROR\", {\n message: \"WebCodecs VideoDecoder is not supported on this device.\",\n type: \"modal\",\n endConversation: true,\n });\n return;\n }\n console.log(\"Initializing conversation with payload:\", payload);\n const element = document.getElementById(\"root\");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send(\"STATUS_CHANGE\", data),\n onConnect: (data) => send(\"CONNECT\", data),\n onDisconnect: (data) => send(\"DISCONNECT\", data),\n onMessage: (data) => send(\"MESSAGE\", data),\n onSuggestions: (data) => send(\"SUGGESTIONS\", data),\n 
onSpeakingStart: () => send(\"SPEAKING_START\"),\n onSpeakingEnd: () => send(\"SPEAKING_END\"),\n onTimeoutWarning: () => send(\"TIMEOUT_WARNING\"),\n onTimeout: () => send(\"TIMEOUT\"),\n onKeepSession: (data) => send(\"KEEP_SESSION\", data),\n onMuteStatusChange: (data) => send(\"MUTE_STATUS\", data),\n onError: (data) => send(\"ERROR\", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send(\"MIC_ERROR\", data),\n onMicrophoneStatusChange: (data) => send(\"MIC_STATUS\", data),\n onMicrophoneSpeechRecognitionResult: (data) => send(\"MIC_TRANSCRIPT\", data),\n },\n });\n\n ready = true;\n send(\"READY\");\n\n while (queue.length > 0) {\n const next = queue.shift();\n try {\n await applyMessage(next);\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to process queued message\",\n type: \"notification\",\n endConversation: false,\n });\n }\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to initialize conversation\",\n type: \"modal\",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n try {\n switch (type) {\n case \"START_SESSION\":\n await conversation.startSession();\n break;\n case \"END_SESSION\":\n await conversation.endSession();\n break;\n case \"SEND_MESSAGE\":\n await conversation.sendMessage(payload?.text || \"\");\n break;\n case \"TOGGLE_MIC\":\n await conversation.toggleMicrophone();\n break;\n case \"TOGGLE_MUTE\":\n await conversation.toggleMute();\n break;\n case \"KEEP_SESSION\":\n await conversation.keepSession();\n break;\n default:\n break;\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Operation failed\",\n type: \"notification\",\n endConversation: false,\n });\n }\n };\n\n const handleIncoming = (event) => {\n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== \"INIT\") {\n queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (error) {\n console.error(\"Failed to handle incoming message:\", error);\n }\n };\n\n window.addEventListener(\"message\", handleIncoming);\n document.addEventListener(\"message\", handleIncoming);\n </script>\n </body>\n</html>`;\n}\n\nfunction useBridge(\n options: ConversationOptions,\n events: ConversationEvents = {},\n bridge: BridgeOptions = {}\n): UseConversationResult {\n const webViewRef = useRef<any>(null);\n const [initialized, setInitialized] = useState(false);\n\n const html = useMemo(\n () => buildHtml(bridge.webClientUrl || DEFAULT_WEB_CLIENT_URL),\n [bridge.webClientUrl]\n );\n\n const post = useCallback((type: string, payload?: any) => {\n const message: BridgeMessage = { type, payload };\n webViewRef.current?.postMessage(JSON.stringify(message));\n }, []);\n\n const init = useCallback(() => {\n if (initialized) return;\n console.log(\"Initializing conversation bridge with options:\", options);\n post(\"INIT\", {\n ...options,\n });\n setInitialized(true);\n }, [initialized, options, post]);\n\n const onMessage = useCallback(\n (event: WebViewMessageEvent) => {\n let data: BridgeMessage | null = null;\n try {\n data = JSON.parse(event.nativeEvent.data);\n } catch (_) {\n return;\n }\n if (!data) return;\n\n switch (data.type) {\n case \"STATUS_CHANGE\":\n events.onStatusChange?.(data.payload);\n break;\n case \"CONNECT\":\n events.onConnect?.(data.payload);\n break;\n case \"DISCONNECT\":\n events.onDisconnect?.(data.payload);\n break;\n case \"MESSAGE\":\n events.onMessage?.(data.payload);\n break;\n case \"SUGGESTIONS\":\n 
events.onSuggestions?.(data.payload);\n break;\n case \"SPEAKING_START\":\n events.onSpeakingStart?.();\n break;\n case \"SPEAKING_END\":\n events.onSpeakingEnd?.();\n break;\n case \"TIMEOUT_WARNING\":\n events.onTimeoutWarning?.();\n break;\n case \"TIMEOUT\":\n events.onTimeout?.();\n break;\n case \"KEEP_SESSION\":\n events.onKeepSession?.(data.payload);\n break;\n case \"MUTE_STATUS\":\n events.onMuteStatusChange?.(data.payload);\n break;\n case \"MIC_STATUS\":\n events.onMicrophoneStatusChange?.(data.payload);\n break;\n case \"MIC_TRANSCRIPT\":\n events.onMicrophoneSpeechRecognitionResult?.(data.payload);\n break;\n case \"MIC_ERROR\":\n events.onMicrophoneError?.(data.payload);\n break;\n case \"ERROR\":\n events.onError?.(data.payload);\n break;\n default:\n break;\n }\n },\n [events]\n );\n\n return {\n webViewRef,\n webViewProps: {\n source: { html },\n onMessage,\n onLoadEnd: () => {\n console.log(\"WebView loaded, initializing bridge...\");\n init();\n },\n javaScriptEnabled: true,\n mediaPlaybackRequiresUserAction: false,\n allowsInlineMediaPlayback: true,\n allowsFileAccess: true,\n domStorageEnabled: true,\n mediaCapturePermissionGrantType: 'grant',\n },\n initialized,\n startSession: () => post(\"START_SESSION\"),\n endSession: () => post(\"END_SESSION\"),\n sendMessage: (text: string) => post(\"SEND_MESSAGE\", { text }),\n toggleMicrophone: () => post(\"TOGGLE_MIC\"),\n toggleMute: () => post(\"TOGGLE_MUTE\"),\n keepSession: () => post(\"KEEP_SESSION\"),\n };\n}\n\nexport function useConversation(\n options: ConversationOptions,\n events?: ConversationEvents,\n bridge?: BridgeOptions\n): UseConversationResult {\n return useBridge(options, events, bridge);\n}\n\nexport type UnithConversationViewProps = ConversationEvents & {\n options: ConversationOptions;\n style?: ViewStyle;\n webviewProps?: Record<string, any>;\n webClientUrl?: string;\n};\n\nexport function UnithConversationView({\n options,\n style,\n webviewProps,\n webClientUrl,\n ...events\n}: UnithConversationViewProps) {\n const convo = useConversation(options, events, { webClientUrl });\n\n return (\n <WebView\n ref={convo.webViewRef}\n {...convo.webViewProps}\n {...webviewProps}\n style={style}\n />\n 
);\n}"],"names":["useConversation","options","events","bridge","webViewRef","useRef","_useState","useState","initialized","setInitialized","html","useMemo","webClientUrl","post","useCallback","type","payload","_webViewRef$current","current","postMessage","JSON","stringify","init","console","log","_extends","onMessage","event","data","parse","nativeEvent","_","onStatusChange","onConnect","onDisconnect","onSuggestions","onSpeakingStart","onSpeakingEnd","onTimeoutWarning","onTimeout","onKeepSession","onMuteStatusChange","onMicrophoneStatusChange","onMicrophoneSpeechRecognitionResult","onMicrophoneError","onError","webViewProps","source","onLoadEnd","javaScriptEnabled","mediaPlaybackRequiresUserAction","allowsInlineMediaPlayback","allowsFileAccess","domStorageEnabled","mediaCapturePermissionGrantType","startSession","endSession","sendMessage","text","toggleMicrophone","toggleMute","keepSession","useBridge","UnithConversationView","_ref","style","webviewProps","convo","_objectWithoutPropertiesLoose","_excluded","React","WebView","ref"],"mappings":"sZA8TgBA,EACdC,EACAC,EACAC,GAEA,OAzHF,SACEF,EACAC,EACAC,QADAD,IAAAA,IAAAA,EAA6B,CAAE,YAC/BC,IAAAA,EAAwB,IAExB,IAAMC,EAAaC,EAAY,MAC/BC,EAAsCC,GAAS,GAAxCC,EAAWF,EAAEG,GAAAA,EAAcH,EAAA,GAE5BI,EAAOC,EACX,u6BAAgBR,EAAOS,cA1JzB,8DAgC+D,o1HA0HC,EAC9D,CAACT,EAAOS,eAGJC,EAAOC,EAAY,SAACC,EAAcC,GAAiB,IAAAC,EAEvDA,OAAAA,EAAAb,EAAWc,UAAXD,EAAoBE,YAAYC,KAAKC,UADN,CAAEN,KAAAA,EAAMC,QAAAA,IAEzC,EAAG,IAEGM,EAAOR,EAAY,WACnBN,IACJe,QAAQC,IAAI,iDAAkDvB,GAC9DY,EAAK,OAAMY,EAAA,CAAA,EACNxB,IAELQ,GAAe,GACjB,EAAG,CAACD,EAAaP,EAASY,IAEpBa,EAAYZ,EAChB,SAACa,GACC,IAAIC,EAA6B,KACjC,IACEA,EAAOR,KAAKS,MAAMF,EAAMG,YAAYF,KACtC,CAAE,MAAOG,GACP,MACF,CACA,GAAKH,EAEL,OAAQA,EAAKb,MACX,IAAK,gBACkB,MAArBb,EAAO8B,gBAAP9B,EAAO8B,eAAiBJ,EAAKZ,SAC7B,MACF,IAAK,UACHd,MAAAA,EAAO+B,WAAP/B,EAAO+B,UAAYL,EAAKZ,SACxB,MACF,IAAK,mBACHd,EAAOgC,cAAPhC,EAAOgC,aAAeN,EAAKZ,SAC3B,MACF,IAAK,UACa,MAAhBd,EAAOwB,WAAPxB,EAAOwB,UAAYE,EAAKZ,SACxB,MACF,IAAK,cACHd,MAAAA,EAAOiC,eAAPjC,EAAOiC,cAAgBP,EAAKZ,SAC5B,MACF,IAAK,uBACHd,EAAOkC,iBAAPlC,EAAOkC,kBACP,MACF,IAAK,eACiB,MAApBlC,EAAOmC,eAAPnC,EAAOmC,gBACP,MACF,IAAK,kBACHnC,MAAAA,EAAOoC,kBAAPpC,EAAOoC,mBACP,MACF,IAAK,UACa,MAAhBpC,EAAOqC,WAAPrC,EAAOqC,YACP,MACF,IAAK,eACHrC,MAAAA,EAAOsC,eAAPtC,EAAOsC,cAAgBZ,EAAKZ,SAC5B,MACF,IAAK,oBACHd,EAAOuC,oBAAPvC,EAAOuC,mBAAqBb,EAAKZ,SACjC,MACF,IAAK,aAC4B,MAA/Bd,EAAOwC,0BAAPxC,EAAOwC,yBAA2Bd,EAAKZ,SACvC,MACF,IAAK,iBACHd,MAAAA,EAAOyC,qCAAPzC,EAAOyC,oCAAsCf,EAAKZ,SAClD,MACF,IAAK,kBACHd,EAAO0C,mBAAP1C,EAAO0C,kBAAoBhB,EAAKZ,SAChC,MACF,IAAK,QACW,MAAdd,EAAO2C,SAAP3C,EAAO2C,QAAUjB,EAAKZ,SAK5B,EACA,CAACd,IAGH,MAAO,CACLE,WAAAA,EACA0C,aAAc,CACZC,OAAQ,CAAErC,KAAAA,GACVgB,UAAAA,EACAsB,UAAW,WACTzB,QAAQC,IAAI,0CACZF,GACF,EACA2B,mBAAmB,EACnBC,iCAAiC,EACjCC,2BAA2B,EAC3BC,kBAAkB,EAClBC,mBAAmB,EACnBC,gCAAiC,SAEnC9C,YAAAA,EACA+C,aAAc,WAAF,OAAQ1C,EAAK,gBAAgB,EACzC2C,WAAY,kBAAM3C,EAAK,cAAc,EACrC4C,YAAa,SAACC,GAAY,OAAK7C,EAAK,eAAgB,CAAE6C,KAAAA,GAAO,EAC7DC,iBAAkB,kBAAM9C,EAAK,aAAa,EAC1C+C,WAAY,WAAM,OAAA/C,EAAK,cAAc,EACrCgD,YAAa,WAAF,OAAQhD,EAAK,eAAe,EAE3C,CAOSiD,CAAU7D,EAASC,EAAQC,EACpC,CASgB,SAAA4D,EAAqBC,GACnC,IACAC,EAAKD,EAALC,MACAC,EAAYF,EAAZE,aACAtD,EAAYoD,EAAZpD,aAGMuD,EAAQnE,EANPgE,EAAP/D,mJAISmE,CAAAJ,EAAAK,GAEsC,CAAEzD,aAAAA,iBAEjD,OACE0D,gBAACC,EAAO9C,GACN+C,IAAKL,EAAM/D,YACP+D,EAAMrB,aACNoB,GACJD,MAAOA,IAGb"}
+
{"version":3,"file":"lib.module.js","sources":["../src/index.tsx"],"sourcesContent":["import React, { useCallback, useMemo, useRef, useState } from \"react\";\nimport { ViewStyle } from \"react-native\";\nimport { WebView, WebViewMessageEvent } from \"react-native-webview\";\n\nexport type ConversationOptions = {\n orgId: string;\n headId: string;\n apiKey: string;\n environment?: string;\n mode?: string;\n language?: string;\n username?: string;\n allowWakeLock?: boolean;\n fadeTransitionsType?: string;\n microphoneProvider?: \"azure\" | \"eleven_labs\" | \"custom\";\n};\n\nexport type ConversationEvents = {\n onStatusChange?: (prop: { status: string }) => void;\n onConnect?: (prop: { userId: string; headInfo: any; microphoneAccess: boolean }) => void;\n onDisconnect?: (prop: any) => void;\n onMessage?: (prop: any) => void;\n onSuggestions?: (prop: { suggestions: string[] }) => void;\n onTimeoutWarning?: () => void;\n onTimeout?: () => void;\n onMuteStatusChange?: (prop: { isMuted: boolean }) => void;\n onSpeakingStart?: () => void;\n onSpeakingEnd?: () => void;\n onKeepSession?: (prop: { granted: boolean }) => void;\n onError?: (prop: { message: string; endConversation: boolean; type: string }) => void;\n onMicrophoneError?: (prop: { message: string }) => void;\n onMicrophoneStatusChange?: (prop: { status: \"ON\" | \"OFF\" | \"PROCESSING\" }) => void;\n onMicrophoneSpeechRecognitionResult?: (prop: { transcript: string }) => void;\n};\n\nexport type BridgeOptions = {\n webClientUrl?: string;\n};\n\nexport type UseConversationResult = {\n webViewRef: React.RefObject<any>;\n webViewProps: Record<string, any>;\n initialized: boolean;\n startSession: () => void;\n endSession: () => void;\n sendMessage: (text: string) => void;\n toggleMicrophone: () => void;\n toggleMute: () => void;\n keepSession: () => void;\n};\n\ntype BridgeMessage = {\n type: string;\n payload?: any;\n};\n\nconst DEFAULT_WEB_CLIENT_URL =\n \"https://unpkg.com/@unith-ai/core-client/dist/lib.module.js\";\n\nfunction buildHtml(webClientUrl: string) {\n return `<!doctype html>\n<html>\n <head>\n <meta charset=\"utf-8\" />\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id=\"root\"></div>\n <script type=\"module\">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => {\n console.log(\"Applying message from React Native:\", msg);\n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === \"INIT\") {\n try {\n const { Conversation } = await import(\"${webClientUrl}\");\n\n if (!(\"VideoDecoder\" in window)) {\n send(\"ERROR\", {\n message: \"WebCodecs VideoDecoder is not supported on this device.\",\n type: \"modal\",\n endConversation: true,\n });\n return;\n }\n\n const element = document.getElementById(\"root\");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send(\"STATUS_CHANGE\", data),\n onConnect: (data) => send(\"CONNECT\", data),\n onDisconnect: (data) => send(\"DISCONNECT\", data),\n onMessage: (data) => send(\"MESSAGE\", data),\n onSuggestions: (data) => send(\"SUGGESTIONS\", data),\n 
onSpeakingStart: () => send(\"SPEAKING_START\"),\n onSpeakingEnd: () => send(\"SPEAKING_END\"),\n onTimeoutWarning: () => send(\"TIMEOUT_WARNING\"),\n onTimeout: () => send(\"TIMEOUT\"),\n onKeepSession: (data) => send(\"KEEP_SESSION\", data),\n onMuteStatusChange: (data) => send(\"MUTE_STATUS\", data),\n onError: (data) => send(\"ERROR\", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send(\"MIC_ERROR\", data),\n onMicrophoneStatusChange: (data) => send(\"MIC_STATUS\", data),\n onMicrophoneSpeechRecognitionResult: (data) => send(\"MIC_TRANSCRIPT\", data),\n },\n });\n\n ready = true;\n send(\"READY\");\n\n while (queue.length > 0) {\n const next = queue.shift();\n try {\n await applyMessage(next);\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to process queued message\",\n type: \"notification\",\n endConversation: false,\n });\n }\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to initialize conversation\",\n type: \"modal\",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n try {\n switch (type) {\n case \"START_SESSION\":\n await conversation.startSession();\n break;\n case \"END_SESSION\":\n await conversation.endSession();\n break;\n case \"SEND_MESSAGE\":\n await conversation.sendMessage(payload?.text || \"\");\n break;\n case \"TOGGLE_MIC\":\n await conversation.toggleMicrophone();\n break;\n case \"TOGGLE_MUTE\":\n await conversation.toggleMute();\n break;\n case \"KEEP_SESSION\":\n await conversation.keepSession();\n break;\n default:\n break;\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Operation failed\",\n type: \"notification\",\n endConversation: false,\n });\n }\n };\n\n const handleIncoming = (event) => {\n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== \"INIT\") {\n queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (error) {\n console.error(\"Failed to handle incoming message:\", error);\n }\n };\n\n window.addEventListener(\"message\", handleIncoming);\n document.addEventListener(\"message\", handleIncoming);\n </script>\n </body>\n</html>`;\n}\n\nfunction useBridge(\n options: ConversationOptions,\n events: ConversationEvents = {},\n bridge: BridgeOptions = {}\n): UseConversationResult {\n const webViewRef = useRef<any>(null);\n const [initialized, setInitialized] = useState(false);\n\n const html = useMemo(\n () => buildHtml(bridge.webClientUrl || DEFAULT_WEB_CLIENT_URL),\n [bridge.webClientUrl]\n );\n\n const post = useCallback((type: string, payload?: any) => {\n const message: BridgeMessage = { type, payload };\n webViewRef.current?.postMessage(JSON.stringify(message));\n }, []);\n\n const init = useCallback(() => {\n if (initialized) return;\n console.log(\"Initializing conversation bridge with options:\", options);\n post(\"INIT\", {\n ...options,\n });\n setInitialized(true);\n }, [initialized, options, post]);\n\n const handleLoadEnd = useCallback(() => {\n console.log(\"WebView loaded, initializing bridge...\");\n init();\n }, [init]);\n\n const onMessage = useCallback(\n (event: WebViewMessageEvent) => {\n let data: BridgeMessage | null = null;\n try {\n data = JSON.parse(event.nativeEvent.data);\n } catch (_) {\n return;\n }\n if (!data) return;\n\n switch (data.type) {\n case \"STATUS_CHANGE\":\n events.onStatusChange?.(data.payload);\n break;\n case \"CONNECT\":\n events.onConnect?.(data.payload);\n break;\n case \"DISCONNECT\":\n events.onDisconnect?.(data.payload);\n 
break;\n case \"MESSAGE\":\n events.onMessage?.(data.payload);\n break;\n case \"SUGGESTIONS\":\n events.onSuggestions?.(data.payload);\n break;\n case \"SPEAKING_START\":\n events.onSpeakingStart?.();\n break;\n case \"SPEAKING_END\":\n events.onSpeakingEnd?.();\n break;\n case \"TIMEOUT_WARNING\":\n events.onTimeoutWarning?.();\n break;\n case \"TIMEOUT\":\n events.onTimeout?.();\n break;\n case \"KEEP_SESSION\":\n events.onKeepSession?.(data.payload);\n break;\n case \"MUTE_STATUS\":\n events.onMuteStatusChange?.(data.payload);\n break;\n case \"MIC_STATUS\":\n events.onMicrophoneStatusChange?.(data.payload);\n break;\n case \"MIC_TRANSCRIPT\":\n events.onMicrophoneSpeechRecognitionResult?.(data.payload);\n break;\n case \"MIC_ERROR\":\n events.onMicrophoneError?.(data.payload);\n break;\n case \"ERROR\":\n events.onError?.(data.payload);\n break;\n default:\n break;\n }\n },\n [events]\n );\n\n return {\n webViewRef,\n webViewProps: {\n source: { html },\n onMessage,\n onLoadEnd: handleLoadEnd,\n javaScriptEnabled: true,\n mediaPlaybackRequiresUserAction: false,\n allowsInlineMediaPlayback: true,\n allowsFileAccess: true,\n domStorageEnabled: true,\n mediaCapturePermissionGrantType: 'grant',\n },\n initialized,\n startSession: () => post(\"START_SESSION\"),\n endSession: () => post(\"END_SESSION\"),\n sendMessage: (text: string) => post(\"SEND_MESSAGE\", { text }),\n toggleMicrophone: () => post(\"TOGGLE_MIC\"),\n toggleMute: () => post(\"TOGGLE_MUTE\"),\n keepSession: () => post(\"KEEP_SESSION\"),\n };\n}\n\nexport function useConversation(\n options: ConversationOptions,\n events?: ConversationEvents,\n bridge?: BridgeOptions\n): UseConversationResult {\n return useBridge(options, events, bridge);\n}\n\nexport type UnithConversationViewProps = ConversationEvents & {\n options: ConversationOptions;\n style?: ViewStyle;\n webviewProps?: Record<string, any>;\n webClientUrl?: string;\n};\n\nexport function UnithConversationView({\n options,\n style,\n webviewProps,\n webClientUrl,\n ...events\n}: UnithConversationViewProps) {\n const convo = useConversation(options, events, { webClientUrl });\n\n return (\n <WebView\n ref={convo.webViewRef}\n {...convo.webViewProps}\n {...webviewProps}\n style={style}\n />\n 
);\n}"],"names":["useConversation","options","events","bridge","webViewRef","useRef","_useState","useState","initialized","setInitialized","html","useMemo","webClientUrl","post","useCallback","type","payload","_webViewRef$current","current","postMessage","JSON","stringify","init","console","log","_extends","handleLoadEnd","onMessage","event","data","parse","nativeEvent","_","onStatusChange","onConnect","onDisconnect","onSuggestions","onSpeakingStart","onSpeakingEnd","onTimeoutWarning","onTimeout","onKeepSession","onMuteStatusChange","onMicrophoneStatusChange","onMicrophoneSpeechRecognitionResult","onMicrophoneError","onError","webViewProps","source","onLoadEnd","javaScriptEnabled","mediaPlaybackRequiresUserAction","allowsInlineMediaPlayback","allowsFileAccess","domStorageEnabled","mediaCapturePermissionGrantType","startSession","endSession","sendMessage","text","toggleMicrophone","toggleMute","keepSession","useBridge","UnithConversationView","_ref","style","webviewProps","convo","_objectWithoutPropertiesLoose","_excluded","React","WebView","ref"],"mappings":"6YAiUgB,SAAAA,EACdC,EACAC,EACAC,GAEA,OA3HF,SACEF,EACAC,EACAC,QADAD,IAAAA,IAAAA,EAA6B,CAAE,YAC/BC,IAAAA,EAAwB,IAExB,IAAMC,EAAaC,EAAY,MAC/BC,EAAsCC,GAAS,GAAxCC,EAAWF,EAAEG,GAAAA,EAAcH,EAAA,GAE5BI,EAAOC,EACX,WAAM,89BAAUR,EAAOS,cA3JzB,8DAiC+D,wwHA0HC,EAC9D,CAACT,EAAOS,eAGJC,EAAOC,EAAY,SAACC,EAAcC,GAAiBC,IAAAA,EAErC,OAAlBA,EAAAb,EAAWc,UAAXD,EAAoBE,YAAYC,KAAKC,UADN,CAAEN,KAAAA,EAAMC,QAAAA,IAEzC,EAAG,IAEGM,EAAOR,EAAY,WACnBN,IACJe,QAAQC,IAAI,iDAAkDvB,GAC9DY,EAAK,OAAMY,KACNxB,IAELQ,GAAe,GACjB,EAAG,CAACD,EAAaP,EAASY,IAEpBa,EAAgBZ,EAAY,WAChCS,QAAQC,IAAI,0CACZF,GACF,EAAG,CAACA,IAEEK,EAAYb,EAChB,SAACc,GACC,IAAIC,EAA6B,KACjC,IACEA,EAAOT,KAAKU,MAAMF,EAAMG,YAAYF,KACtC,CAAE,MAAOG,GACP,MACF,CACA,GAAKH,EAEL,OAAQA,EAAKd,MACX,IAAK,sBACHb,EAAO+B,gBAAP/B,EAAO+B,eAAiBJ,EAAKb,SAC7B,MACF,IAAK,UACHd,MAAAA,EAAOgC,WAAPhC,EAAOgC,UAAYL,EAAKb,SACxB,MACF,IAAK,mBACHd,EAAOiC,cAAPjC,EAAOiC,aAAeN,EAAKb,SAC3B,MACF,IAAK,UACa,MAAhBd,EAAOyB,WAAPzB,EAAOyB,UAAYE,EAAKb,SACxB,MACF,IAAK,oBACHd,EAAOkC,eAAPlC,EAAOkC,cAAgBP,EAAKb,SAC5B,MACF,IAAK,iBACmB,MAAtBd,EAAOmC,iBAAPnC,EAAOmC,kBACP,MACF,IAAK,eACHnC,MAAAA,EAAOoC,eAAPpC,EAAOoC,gBACP,MACF,IAAK,kBACoB,MAAvBpC,EAAOqC,kBAAPrC,EAAOqC,mBACP,MACF,IAAK,UACHrC,MAAAA,EAAOsC,WAAPtC,EAAOsC,YACP,MACF,IAAK,qBACHtC,EAAOuC,eAAPvC,EAAOuC,cAAgBZ,EAAKb,SAC5B,MACF,IAAK,cACsB,MAAzBd,EAAOwC,oBAAPxC,EAAOwC,mBAAqBb,EAAKb,SACjC,MACF,IAAK,aACHd,MAAAA,EAAOyC,0BAAPzC,EAAOyC,yBAA2Bd,EAAKb,SACvC,MACF,IAAK,uBACHd,EAAO0C,qCAAP1C,EAAO0C,oCAAsCf,EAAKb,SAClD,MACF,IAAK,YACHd,MAAAA,EAAO2C,mBAAP3C,EAAO2C,kBAAoBhB,EAAKb,SAChC,MACF,IAAK,cACHd,EAAO4C,SAAP5C,EAAO4C,QAAUjB,EAAKb,SAK5B,EACA,CAACd,IAGH,MAAO,CACLE,WAAAA,EACA2C,aAAc,CACZC,OAAQ,CAAEtC,KAAAA,GACViB,UAAAA,EACAsB,UAAWvB,EACXwB,mBAAmB,EACnBC,iCAAiC,EACjCC,2BAA2B,EAC3BC,kBAAkB,EAClBC,mBAAmB,EACnBC,gCAAiC,SAEnC/C,YAAAA,EACAgD,aAAc,WAAM,OAAA3C,EAAK,gBAAgB,EACzC4C,WAAY,WAAF,OAAQ5C,EAAK,cAAc,EACrC6C,YAAa,SAACC,GAAiB,OAAA9C,EAAK,eAAgB,CAAE8C,KAAAA,GAAO,EAC7DC,iBAAkB,WAAM,OAAA/C,EAAK,aAAa,EAC1CgD,WAAY,WAAF,OAAQhD,EAAK,cAAc,EACrCiD,YAAa,kBAAMjD,EAAK,eAAe,EAE3C,CAOSkD,CAAU9D,EAASC,EAAQC,EACpC,CASgB,SAAA6D,EAAqBC,GAMR,IAJ3BC,EAAKD,EAALC,MACAC,EAAYF,EAAZE,aACAvD,EAAYqD,EAAZrD,aAGMwD,EAAQpE,EANPiE,EAAPhE,mJAISoE,CAAAJ,EAAAK,GAEsC,CAAE1D,aAAAA,iBAEjD,OACE2D,gBAACC,EAAO/C,GACNgD,IAAKL,EAAMhE,YACPgE,EAAMrB,aACNoB,GACJD,MAAOA,IAGb"}
package/dist/lib.umd.js
CHANGED
@@ -1,2 +1,2 @@
-
!function(n,e){"object"==typeof exports&&"undefined"!=typeof module?e(exports,require("react"),require("react-native-webview")):"function"==typeof define&&define.amd?define(["exports","react","react-native-webview"],e):e((n||self).reactNative={},n.react,n.reactNativeWebview)}(this,function(n,e,a){function t(n){return n&&"object"==typeof n&&"default"in n?n:{default:n}}var o=/*#__PURE__*/t(e);function i(){return i=Object.assign?Object.assign.bind():function(n){for(var e=1;e<arguments.length;e++){var a=arguments[e];for(var t in a)({}).hasOwnProperty.call(a,t)&&(n[t]=a[t])}return n},i.apply(null,arguments)}var
+
!function(n,e){"object"==typeof exports&&"undefined"!=typeof module?e(exports,require("react"),require("react-native-webview")):"function"==typeof define&&define.amd?define(["exports","react","react-native-webview"],e):e((n||self).reactNative={},n.react,n.reactNativeWebview)}(this,function(n,e,a){function t(n){return n&&"object"==typeof n&&"default"in n?n:{default:n}}var o=/*#__PURE__*/t(e);function i(){return i=Object.assign?Object.assign.bind():function(n){for(var e=1;e<arguments.length;e++){var a=arguments[e];for(var t in a)({}).hasOwnProperty.call(a,t)&&(n[t]=a[t])}return n},i.apply(null,arguments)}var s=["options","style","webviewProps","webClientUrl"];function r(n,a,t){return function(n,a,t){void 0===a&&(a={}),void 0===t&&(t={});var o=e.useRef(null),s=e.useState(!1),r=s[0],l=s[1],c=e.useMemo(function(){return'<!doctype html>\n<html>\n <head>\n <meta charset="utf-8" />\n <meta name="viewport" content="width=device-width, initial-scale=1" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id="root"></div>\n <script type="module">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => {\n console.log("Applying message from React Native:", msg);\n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === "INIT") {\n try {\n const { Conversation } = await import("'+(t.webClientUrl||"https://unpkg.com/@unith-ai/core-client/dist/lib.module.js")+'");\n\n if (!("VideoDecoder" in window)) {\n send("ERROR", {\n message: "WebCodecs VideoDecoder is not supported on this device.",\n type: "modal",\n endConversation: true,\n });\n return;\n }\n\n const element = document.getElementById("root");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send("STATUS_CHANGE", data),\n onConnect: (data) => send("CONNECT", data),\n onDisconnect: (data) => send("DISCONNECT", data),\n onMessage: (data) => send("MESSAGE", data),\n onSuggestions: (data) => send("SUGGESTIONS", data),\n onSpeakingStart: () => send("SPEAKING_START"),\n onSpeakingEnd: () => send("SPEAKING_END"),\n onTimeoutWarning: () => send("TIMEOUT_WARNING"),\n onTimeout: () => send("TIMEOUT"),\n onKeepSession: (data) => send("KEEP_SESSION", data),\n onMuteStatusChange: (data) => send("MUTE_STATUS", data),\n onError: (data) => send("ERROR", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send("MIC_ERROR", data),\n onMicrophoneStatusChange: (data) => send("MIC_STATUS", data),\n onMicrophoneSpeechRecognitionResult: (data) => send("MIC_TRANSCRIPT", data),\n },\n });\n\n ready = true;\n send("READY");\n\n while (queue.length > 0) {\n const next = queue.shift();\n try {\n await applyMessage(next);\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Failed to process queued message",\n type: "notification",\n endConversation: false,\n });\n }\n }\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Failed to initialize conversation",\n type: "modal",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n try {\n switch (type) {\n case "START_SESSION":\n await conversation.startSession();\n break;\n case "END_SESSION":\n await 
conversation.endSession();\n break;\n case "SEND_MESSAGE":\n await conversation.sendMessage(payload?.text || "");\n break;\n case "TOGGLE_MIC":\n await conversation.toggleMicrophone();\n break;\n case "TOGGLE_MUTE":\n await conversation.toggleMute();\n break;\n case "KEEP_SESSION":\n await conversation.keepSession();\n break;\n default:\n break;\n }\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Operation failed",\n type: "notification",\n endConversation: false,\n });\n }\n };\n\n const handleIncoming = (event) => {\n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== "INIT") {\n queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (error) {\n console.error("Failed to handle incoming message:", error);\n }\n };\n\n window.addEventListener("message", handleIncoming);\n document.addEventListener("message", handleIncoming);\n <\/script>\n </body>\n</html>'},[t.webClientUrl]),d=e.useCallback(function(n,e){var a;null==(a=o.current)||a.postMessage(JSON.stringify({type:n,payload:e}))},[]),u=e.useCallback(function(){r||(console.log("Initializing conversation bridge with options:",n),d("INIT",i({},n)),l(!0))},[r,n,d]),p=e.useCallback(function(){console.log("WebView loaded, initializing bridge..."),u()},[u]),S=e.useCallback(function(n){var e=null;try{e=JSON.parse(n.nativeEvent.data)}catch(n){return}if(e)switch(e.type){case"STATUS_CHANGE":null==a.onStatusChange||a.onStatusChange(e.payload);break;case"CONNECT":null==a.onConnect||a.onConnect(e.payload);break;case"DISCONNECT":null==a.onDisconnect||a.onDisconnect(e.payload);break;case"MESSAGE":null==a.onMessage||a.onMessage(e.payload);break;case"SUGGESTIONS":null==a.onSuggestions||a.onSuggestions(e.payload);break;case"SPEAKING_START":null==a.onSpeakingStart||a.onSpeakingStart();break;case"SPEAKING_END":null==a.onSpeakingEnd||a.onSpeakingEnd();break;case"TIMEOUT_WARNING":null==a.onTimeoutWarning||a.onTimeoutWarning();break;case"TIMEOUT":null==a.onTimeout||a.onTimeout();break;case"KEEP_SESSION":null==a.onKeepSession||a.onKeepSession(e.payload);break;case"MUTE_STATUS":null==a.onMuteStatusChange||a.onMuteStatusChange(e.payload);break;case"MIC_STATUS":null==a.onMicrophoneStatusChange||a.onMicrophoneStatusChange(e.payload);break;case"MIC_TRANSCRIPT":null==a.onMicrophoneSpeechRecognitionResult||a.onMicrophoneSpeechRecognitionResult(e.payload);break;case"MIC_ERROR":null==a.onMicrophoneError||a.onMicrophoneError(e.payload);break;case"ERROR":null==a.onError||a.onError(e.payload)}},[a]);return{webViewRef:o,webViewProps:{source:{html:c},onMessage:S,onLoadEnd:p,javaScriptEnabled:!0,mediaPlaybackRequiresUserAction:!1,allowsInlineMediaPlayback:!0,allowsFileAccess:!0,domStorageEnabled:!0,mediaCapturePermissionGrantType:"grant"},initialized:r,startSession:function(){return d("START_SESSION")},endSession:function(){return d("END_SESSION")},sendMessage:function(n){return d("SEND_MESSAGE",{text:n})},toggleMicrophone:function(){return d("TOGGLE_MIC")},toggleMute:function(){return d("TOGGLE_MUTE")},keepSession:function(){return d("KEEP_SESSION")}}}(n,a,t)}n.UnithConversationView=function(n){var e=n.style,t=n.webviewProps,l=n.webClientUrl,c=r(n.options,function(n,e){if(null==n)return{};var a={};for(var t in n)if({}.hasOwnProperty.call(n,t)){if(-1!==e.indexOf(t))continue;a[t]=n[t]}return a}(n,s),{webClientUrl:l});/*#__PURE__*/return o.default.createElement(a.WebView,i({ref:c.webViewRef},c.webViewProps,t,{style:e}))},n.useConversation=r});
//# sourceMappingURL=lib.umd.js.map
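The UMD bundle carries the identical change and differs only in packaging, registering itself as `(n || self).reactNative` when neither CommonJS nor AMD is present. One knob worth noting across all four builds: the page injected into the WebView dynamic-imports the core client from unpkg by default, and `BridgeOptions.webClientUrl` overrides that. A sketch with a hypothetical self-hosted URL and placeholder credentials:

import { useConversation } from "@unith-ai/react-native";

function usePinnedClientBridge() {
  return useConversation(
    { orgId: "org_example", headId: "head_example", apiKey: "key_example" }, // placeholders
    {},
    {
      // Source default: "https://unpkg.com/@unith-ai/core-client/dist/lib.module.js".
      // A pinned, self-hosted copy (hypothetical URL) avoids silently tracking
      // whatever unpkg serves as the latest core-client build:
      webClientUrl: "https://cdn.example.com/unith/core-client/lib.module.js",
    }
  );
}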
package/dist/lib.umd.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"lib.umd.js","sources":["../src/index.tsx"],"sourcesContent":["import React, { useCallback, useMemo, useRef, useState } from \"react\";\nimport { ViewStyle } from \"react-native\";\nimport { WebView, WebViewMessageEvent } from \"react-native-webview\";\n\nexport type ConversationOptions = {\n orgId: string;\n headId: string;\n apiKey: string;\n environment?: string;\n mode?: string;\n language?: string;\n username?: string;\n allowWakeLock?: boolean;\n fadeTransitionsType?: string;\n microphoneProvider?: \"azure\" | \"eleven_labs\" | \"custom\";\n};\n\nexport type ConversationEvents = {\n onStatusChange?: (prop: { status: string }) => void;\n onConnect?: (prop: { userId: string; headInfo: any; microphoneAccess: boolean }) => void;\n onDisconnect?: (prop: any) => void;\n onMessage?: (prop: any) => void;\n onSuggestions?: (prop: { suggestions: string[] }) => void;\n onTimeoutWarning?: () => void;\n onTimeout?: () => void;\n onMuteStatusChange?: (prop: { isMuted: boolean }) => void;\n onSpeakingStart?: () => void;\n onSpeakingEnd?: () => void;\n onKeepSession?: (prop: { granted: boolean }) => void;\n onError?: (prop: { message: string; endConversation: boolean; type: string }) => void;\n onMicrophoneError?: (prop: { message: string }) => void;\n onMicrophoneStatusChange?: (prop: { status: \"ON\" | \"OFF\" | \"PROCESSING\" }) => void;\n onMicrophoneSpeechRecognitionResult?: (prop: { transcript: string }) => void;\n};\n\nexport type BridgeOptions = {\n webClientUrl?: string;\n};\n\nexport type UseConversationResult = {\n webViewRef: React.RefObject<any>;\n webViewProps: Record<string, any>;\n initialized: boolean;\n startSession: () => void;\n endSession: () => void;\n sendMessage: (text: string) => void;\n toggleMicrophone: () => void;\n toggleMute: () => void;\n keepSession: () => void;\n};\n\ntype BridgeMessage = {\n type: string;\n payload?: any;\n};\n\nconst DEFAULT_WEB_CLIENT_URL =\n \"https://unpkg.com/@unith-ai/core-client/dist/lib.module.js\";\n\nfunction buildHtml(webClientUrl: string) {\n return `<!doctype html>\n<html>\n <head>\n <meta charset=\"utf-8\" />\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id=\"root\"></div>\n <script type=\"module\">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => {\n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === \"INIT\") {\n try {\n const { Conversation } = await import(\"${webClientUrl}\");\n\n if (!(\"VideoDecoder\" in window)) {\n send(\"ERROR\", {\n message: \"WebCodecs VideoDecoder is not supported on this device.\",\n type: \"modal\",\n endConversation: true,\n });\n return;\n }\n console.log(\"Initializing conversation with payload:\", payload);\n const element = document.getElementById(\"root\");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send(\"STATUS_CHANGE\", data),\n onConnect: (data) => send(\"CONNECT\", data),\n onDisconnect: (data) => send(\"DISCONNECT\", data),\n onMessage: (data) => send(\"MESSAGE\", data),\n onSuggestions: (data) => send(\"SUGGESTIONS\", data),\n 
onSpeakingStart: () => send(\"SPEAKING_START\"),\n onSpeakingEnd: () => send(\"SPEAKING_END\"),\n onTimeoutWarning: () => send(\"TIMEOUT_WARNING\"),\n onTimeout: () => send(\"TIMEOUT\"),\n onKeepSession: (data) => send(\"KEEP_SESSION\", data),\n onMuteStatusChange: (data) => send(\"MUTE_STATUS\", data),\n onError: (data) => send(\"ERROR\", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send(\"MIC_ERROR\", data),\n onMicrophoneStatusChange: (data) => send(\"MIC_STATUS\", data),\n onMicrophoneSpeechRecognitionResult: (data) => send(\"MIC_TRANSCRIPT\", data),\n },\n });\n\n ready = true;\n send(\"READY\");\n\n while (queue.length > 0) {\n const next = queue.shift();\n try {\n await applyMessage(next);\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to process queued message\",\n type: \"notification\",\n endConversation: false,\n });\n }\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to initialize conversation\",\n type: \"modal\",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n try {\n switch (type) {\n case \"START_SESSION\":\n await conversation.startSession();\n break;\n case \"END_SESSION\":\n await conversation.endSession();\n break;\n case \"SEND_MESSAGE\":\n await conversation.sendMessage(payload?.text || \"\");\n break;\n case \"TOGGLE_MIC\":\n await conversation.toggleMicrophone();\n break;\n case \"TOGGLE_MUTE\":\n await conversation.toggleMute();\n break;\n case \"KEEP_SESSION\":\n await conversation.keepSession();\n break;\n default:\n break;\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Operation failed\",\n type: \"notification\",\n endConversation: false,\n });\n }\n };\n\n const handleIncoming = (event) => {\n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== \"INIT\") {\n queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (error) {\n console.error(\"Failed to handle incoming message:\", error);\n }\n };\n\n window.addEventListener(\"message\", handleIncoming);\n document.addEventListener(\"message\", handleIncoming);\n </script>\n </body>\n</html>`;\n}\n\nfunction useBridge(\n options: ConversationOptions,\n events: ConversationEvents = {},\n bridge: BridgeOptions = {}\n): UseConversationResult {\n const webViewRef = useRef<any>(null);\n const [initialized, setInitialized] = useState(false);\n\n const html = useMemo(\n () => buildHtml(bridge.webClientUrl || DEFAULT_WEB_CLIENT_URL),\n [bridge.webClientUrl]\n );\n\n const post = useCallback((type: string, payload?: any) => {\n const message: BridgeMessage = { type, payload };\n webViewRef.current?.postMessage(JSON.stringify(message));\n }, []);\n\n const init = useCallback(() => {\n if (initialized) return;\n console.log(\"Initializing conversation bridge with options:\", options);\n post(\"INIT\", {\n ...options,\n });\n setInitialized(true);\n }, [initialized, options, post]);\n\n const onMessage = useCallback(\n (event: WebViewMessageEvent) => {\n let data: BridgeMessage | null = null;\n try {\n data = JSON.parse(event.nativeEvent.data);\n } catch (_) {\n return;\n }\n if (!data) return;\n\n switch (data.type) {\n case \"STATUS_CHANGE\":\n events.onStatusChange?.(data.payload);\n break;\n case \"CONNECT\":\n events.onConnect?.(data.payload);\n break;\n case \"DISCONNECT\":\n events.onDisconnect?.(data.payload);\n break;\n case \"MESSAGE\":\n events.onMessage?.(data.payload);\n break;\n case \"SUGGESTIONS\":\n 
events.onSuggestions?.(data.payload);\n break;\n case \"SPEAKING_START\":\n events.onSpeakingStart?.();\n break;\n case \"SPEAKING_END\":\n events.onSpeakingEnd?.();\n break;\n case \"TIMEOUT_WARNING\":\n events.onTimeoutWarning?.();\n break;\n case \"TIMEOUT\":\n events.onTimeout?.();\n break;\n case \"KEEP_SESSION\":\n events.onKeepSession?.(data.payload);\n break;\n case \"MUTE_STATUS\":\n events.onMuteStatusChange?.(data.payload);\n break;\n case \"MIC_STATUS\":\n events.onMicrophoneStatusChange?.(data.payload);\n break;\n case \"MIC_TRANSCRIPT\":\n events.onMicrophoneSpeechRecognitionResult?.(data.payload);\n break;\n case \"MIC_ERROR\":\n events.onMicrophoneError?.(data.payload);\n break;\n case \"ERROR\":\n events.onError?.(data.payload);\n break;\n default:\n break;\n }\n },\n [events]\n );\n\n return {\n webViewRef,\n webViewProps: {\n source: { html },\n onMessage,\n onLoadEnd: () => {\n console.log(\"WebView loaded, initializing bridge...\");\n init();\n },\n javaScriptEnabled: true,\n mediaPlaybackRequiresUserAction: false,\n allowsInlineMediaPlayback: true,\n allowsFileAccess: true,\n domStorageEnabled: true,\n mediaCapturePermissionGrantType: 'grant',\n },\n initialized,\n startSession: () => post(\"START_SESSION\"),\n endSession: () => post(\"END_SESSION\"),\n sendMessage: (text: string) => post(\"SEND_MESSAGE\", { text }),\n toggleMicrophone: () => post(\"TOGGLE_MIC\"),\n toggleMute: () => post(\"TOGGLE_MUTE\"),\n keepSession: () => post(\"KEEP_SESSION\"),\n };\n}\n\nexport function useConversation(\n options: ConversationOptions,\n events?: ConversationEvents,\n bridge?: BridgeOptions\n): UseConversationResult {\n return useBridge(options, events, bridge);\n}\n\nexport type UnithConversationViewProps = ConversationEvents & {\n options: ConversationOptions;\n style?: ViewStyle;\n webviewProps?: Record<string, any>;\n webClientUrl?: string;\n};\n\nexport function UnithConversationView({\n options,\n style,\n webviewProps,\n webClientUrl,\n ...events\n}: UnithConversationViewProps) {\n const convo = useConversation(options, events, { webClientUrl });\n\n return (\n <WebView\n ref={convo.webViewRef}\n {...convo.webViewProps}\n {...webviewProps}\n style={style}\n />\n 
);\n}"],"names":["useConversation","options","events","bridge","webViewRef","useRef","_useState","useState","initialized","setInitialized","html","useMemo","webClientUrl","post","useCallback","type","payload","_webViewRef$current","current","postMessage","JSON","stringify","init","console","log","_extends","onMessage","event","data","parse","nativeEvent","_","onStatusChange","onConnect","onDisconnect","onSuggestions","onSpeakingStart","onSpeakingEnd","onTimeoutWarning","onTimeout","onKeepSession","onMuteStatusChange","onMicrophoneStatusChange","onMicrophoneSpeechRecognitionResult","onMicrophoneError","onError","webViewProps","source","onLoadEnd","javaScriptEnabled","mediaPlaybackRequiresUserAction","allowsInlineMediaPlayback","allowsFileAccess","domStorageEnabled","mediaCapturePermissionGrantType","startSession","endSession","sendMessage","text","toggleMicrophone","toggleMute","keepSession","useBridge","_ref","style","webviewProps","convo","_objectWithoutPropertiesLoose","_excluded","React","WebView","ref"],"mappings":"+sBA8TgBA,EACdC,EACAC,EACAC,GAEA,OAzHF,SACEF,EACAC,EACAC,QADAD,IAAAA,IAAAA,EAA6B,CAAE,YAC/BC,IAAAA,EAAwB,IAExB,IAAMC,EAAaC,EAAAA,OAAY,MAC/BC,EAAsCC,YAAS,GAAxCC,EAAWF,EAAEG,GAAAA,EAAcH,EAAA,GAE5BI,EAAOC,EAAAA,QACX,u6BAAgBR,EAAOS,cA1JzB,8DAgC+D,o1HA0HC,EAC9D,CAACT,EAAOS,eAGJC,EAAOC,EAAAA,YAAY,SAACC,EAAcC,GAAiB,IAAAC,EAEvDA,OAAAA,EAAAb,EAAWc,UAAXD,EAAoBE,YAAYC,KAAKC,UADN,CAAEN,KAAAA,EAAMC,QAAAA,IAEzC,EAAG,IAEGM,EAAOR,EAAAA,YAAY,WACnBN,IACJe,QAAQC,IAAI,iDAAkDvB,GAC9DY,EAAK,OAAMY,EAAA,CAAA,EACNxB,IAELQ,GAAe,GACjB,EAAG,CAACD,EAAaP,EAASY,IAEpBa,EAAYZ,EAAWA,YAC3B,SAACa,GACC,IAAIC,EAA6B,KACjC,IACEA,EAAOR,KAAKS,MAAMF,EAAMG,YAAYF,KACtC,CAAE,MAAOG,GACP,MACF,CACA,GAAKH,EAEL,OAAQA,EAAKb,MACX,IAAK,gBACkB,MAArBb,EAAO8B,gBAAP9B,EAAO8B,eAAiBJ,EAAKZ,SAC7B,MACF,IAAK,UACHd,MAAAA,EAAO+B,WAAP/B,EAAO+B,UAAYL,EAAKZ,SACxB,MACF,IAAK,mBACHd,EAAOgC,cAAPhC,EAAOgC,aAAeN,EAAKZ,SAC3B,MACF,IAAK,UACa,MAAhBd,EAAOwB,WAAPxB,EAAOwB,UAAYE,EAAKZ,SACxB,MACF,IAAK,cACHd,MAAAA,EAAOiC,eAAPjC,EAAOiC,cAAgBP,EAAKZ,SAC5B,MACF,IAAK,uBACHd,EAAOkC,iBAAPlC,EAAOkC,kBACP,MACF,IAAK,eACiB,MAApBlC,EAAOmC,eAAPnC,EAAOmC,gBACP,MACF,IAAK,kBACHnC,MAAAA,EAAOoC,kBAAPpC,EAAOoC,mBACP,MACF,IAAK,UACa,MAAhBpC,EAAOqC,WAAPrC,EAAOqC,YACP,MACF,IAAK,eACHrC,MAAAA,EAAOsC,eAAPtC,EAAOsC,cAAgBZ,EAAKZ,SAC5B,MACF,IAAK,oBACHd,EAAOuC,oBAAPvC,EAAOuC,mBAAqBb,EAAKZ,SACjC,MACF,IAAK,aAC4B,MAA/Bd,EAAOwC,0BAAPxC,EAAOwC,yBAA2Bd,EAAKZ,SACvC,MACF,IAAK,iBACHd,MAAAA,EAAOyC,qCAAPzC,EAAOyC,oCAAsCf,EAAKZ,SAClD,MACF,IAAK,kBACHd,EAAO0C,mBAAP1C,EAAO0C,kBAAoBhB,EAAKZ,SAChC,MACF,IAAK,QACW,MAAdd,EAAO2C,SAAP3C,EAAO2C,QAAUjB,EAAKZ,SAK5B,EACA,CAACd,IAGH,MAAO,CACLE,WAAAA,EACA0C,aAAc,CACZC,OAAQ,CAAErC,KAAAA,GACVgB,UAAAA,EACAsB,UAAW,WACTzB,QAAQC,IAAI,0CACZF,GACF,EACA2B,mBAAmB,EACnBC,iCAAiC,EACjCC,2BAA2B,EAC3BC,kBAAkB,EAClBC,mBAAmB,EACnBC,gCAAiC,SAEnC9C,YAAAA,EACA+C,aAAc,WAAF,OAAQ1C,EAAK,gBAAgB,EACzC2C,WAAY,kBAAM3C,EAAK,cAAc,EACrC4C,YAAa,SAACC,GAAY,OAAK7C,EAAK,eAAgB,CAAE6C,KAAAA,GAAO,EAC7DC,iBAAkB,kBAAM9C,EAAK,aAAa,EAC1C+C,WAAY,WAAM,OAAA/C,EAAK,cAAc,EACrCgD,YAAa,WAAF,OAAQhD,EAAK,eAAe,EAE3C,CAOSiD,CAAU7D,EAASC,EAAQC,EACpC,yBASgB,SAAqB4D,GACnC,IACAC,EAAKD,EAALC,MACAC,EAAYF,EAAZE,aACArD,EAAYmD,EAAZnD,aAGMsD,EAAQlE,EANP+D,EAAP9D,mJAISkE,CAAAJ,EAAAK,GAEsC,CAAExD,aAAAA,iBAEjD,OACEyD,wBAACC,UAAO7C,GACN8C,IAAKL,EAAM9D,YACP8D,EAAMpB,aACNmB,GACJD,MAAOA,IAGb"}
|
1
|
+
{"version":3,"file":"lib.umd.js","sources":["../src/index.tsx"],"sourcesContent":["import React, { useCallback, useMemo, useRef, useState } from \"react\";\nimport { ViewStyle } from \"react-native\";\nimport { WebView, WebViewMessageEvent } from \"react-native-webview\";\n\nexport type ConversationOptions = {\n orgId: string;\n headId: string;\n apiKey: string;\n environment?: string;\n mode?: string;\n language?: string;\n username?: string;\n allowWakeLock?: boolean;\n fadeTransitionsType?: string;\n microphoneProvider?: \"azure\" | \"eleven_labs\" | \"custom\";\n};\n\nexport type ConversationEvents = {\n onStatusChange?: (prop: { status: string }) => void;\n onConnect?: (prop: { userId: string; headInfo: any; microphoneAccess: boolean }) => void;\n onDisconnect?: (prop: any) => void;\n onMessage?: (prop: any) => void;\n onSuggestions?: (prop: { suggestions: string[] }) => void;\n onTimeoutWarning?: () => void;\n onTimeout?: () => void;\n onMuteStatusChange?: (prop: { isMuted: boolean }) => void;\n onSpeakingStart?: () => void;\n onSpeakingEnd?: () => void;\n onKeepSession?: (prop: { granted: boolean }) => void;\n onError?: (prop: { message: string; endConversation: boolean; type: string }) => void;\n onMicrophoneError?: (prop: { message: string }) => void;\n onMicrophoneStatusChange?: (prop: { status: \"ON\" | \"OFF\" | \"PROCESSING\" }) => void;\n onMicrophoneSpeechRecognitionResult?: (prop: { transcript: string }) => void;\n};\n\nexport type BridgeOptions = {\n webClientUrl?: string;\n};\n\nexport type UseConversationResult = {\n webViewRef: React.RefObject<any>;\n webViewProps: Record<string, any>;\n initialized: boolean;\n startSession: () => void;\n endSession: () => void;\n sendMessage: (text: string) => void;\n toggleMicrophone: () => void;\n toggleMute: () => void;\n keepSession: () => void;\n};\n\ntype BridgeMessage = {\n type: string;\n payload?: any;\n};\n\nconst DEFAULT_WEB_CLIENT_URL =\n \"https://unpkg.com/@unith-ai/core-client/dist/lib.module.js\";\n\nfunction buildHtml(webClientUrl: string) {\n return `<!doctype html>\n<html>\n <head>\n <meta charset=\"utf-8\" />\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id=\"root\"></div>\n <script type=\"module\">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => {\n console.log(\"Applying message from React Native:\", msg);\n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === \"INIT\") {\n try {\n const { Conversation } = await import(\"${webClientUrl}\");\n\n if (!(\"VideoDecoder\" in window)) {\n send(\"ERROR\", {\n message: \"WebCodecs VideoDecoder is not supported on this device.\",\n type: \"modal\",\n endConversation: true,\n });\n return;\n }\n\n const element = document.getElementById(\"root\");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send(\"STATUS_CHANGE\", data),\n onConnect: (data) => send(\"CONNECT\", data),\n onDisconnect: (data) => send(\"DISCONNECT\", data),\n onMessage: (data) => send(\"MESSAGE\", data),\n onSuggestions: (data) => send(\"SUGGESTIONS\", data),\n 
onSpeakingStart: () => send(\"SPEAKING_START\"),\n onSpeakingEnd: () => send(\"SPEAKING_END\"),\n onTimeoutWarning: () => send(\"TIMEOUT_WARNING\"),\n onTimeout: () => send(\"TIMEOUT\"),\n onKeepSession: (data) => send(\"KEEP_SESSION\", data),\n onMuteStatusChange: (data) => send(\"MUTE_STATUS\", data),\n onError: (data) => send(\"ERROR\", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send(\"MIC_ERROR\", data),\n onMicrophoneStatusChange: (data) => send(\"MIC_STATUS\", data),\n onMicrophoneSpeechRecognitionResult: (data) => send(\"MIC_TRANSCRIPT\", data),\n },\n });\n\n ready = true;\n send(\"READY\");\n\n while (queue.length > 0) {\n const next = queue.shift();\n try {\n await applyMessage(next);\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to process queued message\",\n type: \"notification\",\n endConversation: false,\n });\n }\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to initialize conversation\",\n type: \"modal\",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n try {\n switch (type) {\n case \"START_SESSION\":\n await conversation.startSession();\n break;\n case \"END_SESSION\":\n await conversation.endSession();\n break;\n case \"SEND_MESSAGE\":\n await conversation.sendMessage(payload?.text || \"\");\n break;\n case \"TOGGLE_MIC\":\n await conversation.toggleMicrophone();\n break;\n case \"TOGGLE_MUTE\":\n await conversation.toggleMute();\n break;\n case \"KEEP_SESSION\":\n await conversation.keepSession();\n break;\n default:\n break;\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Operation failed\",\n type: \"notification\",\n endConversation: false,\n });\n }\n };\n\n const handleIncoming = (event) => {\n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== \"INIT\") {\n queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (error) {\n console.error(\"Failed to handle incoming message:\", error);\n }\n };\n\n window.addEventListener(\"message\", handleIncoming);\n document.addEventListener(\"message\", handleIncoming);\n </script>\n </body>\n</html>`;\n}\n\nfunction useBridge(\n options: ConversationOptions,\n events: ConversationEvents = {},\n bridge: BridgeOptions = {}\n): UseConversationResult {\n const webViewRef = useRef<any>(null);\n const [initialized, setInitialized] = useState(false);\n\n const html = useMemo(\n () => buildHtml(bridge.webClientUrl || DEFAULT_WEB_CLIENT_URL),\n [bridge.webClientUrl]\n );\n\n const post = useCallback((type: string, payload?: any) => {\n const message: BridgeMessage = { type, payload };\n webViewRef.current?.postMessage(JSON.stringify(message));\n }, []);\n\n const init = useCallback(() => {\n if (initialized) return;\n console.log(\"Initializing conversation bridge with options:\", options);\n post(\"INIT\", {\n ...options,\n });\n setInitialized(true);\n }, [initialized, options, post]);\n\n const handleLoadEnd = useCallback(() => {\n console.log(\"WebView loaded, initializing bridge...\");\n init();\n }, [init]);\n\n const onMessage = useCallback(\n (event: WebViewMessageEvent) => {\n let data: BridgeMessage | null = null;\n try {\n data = JSON.parse(event.nativeEvent.data);\n } catch (_) {\n return;\n }\n if (!data) return;\n\n switch (data.type) {\n case \"STATUS_CHANGE\":\n events.onStatusChange?.(data.payload);\n break;\n case \"CONNECT\":\n events.onConnect?.(data.payload);\n break;\n case \"DISCONNECT\":\n events.onDisconnect?.(data.payload);\n 
break;\n case \"MESSAGE\":\n events.onMessage?.(data.payload);\n break;\n case \"SUGGESTIONS\":\n events.onSuggestions?.(data.payload);\n break;\n case \"SPEAKING_START\":\n events.onSpeakingStart?.();\n break;\n case \"SPEAKING_END\":\n events.onSpeakingEnd?.();\n break;\n case \"TIMEOUT_WARNING\":\n events.onTimeoutWarning?.();\n break;\n case \"TIMEOUT\":\n events.onTimeout?.();\n break;\n case \"KEEP_SESSION\":\n events.onKeepSession?.(data.payload);\n break;\n case \"MUTE_STATUS\":\n events.onMuteStatusChange?.(data.payload);\n break;\n case \"MIC_STATUS\":\n events.onMicrophoneStatusChange?.(data.payload);\n break;\n case \"MIC_TRANSCRIPT\":\n events.onMicrophoneSpeechRecognitionResult?.(data.payload);\n break;\n case \"MIC_ERROR\":\n events.onMicrophoneError?.(data.payload);\n break;\n case \"ERROR\":\n events.onError?.(data.payload);\n break;\n default:\n break;\n }\n },\n [events]\n );\n\n return {\n webViewRef,\n webViewProps: {\n source: { html },\n onMessage,\n onLoadEnd: handleLoadEnd,\n javaScriptEnabled: true,\n mediaPlaybackRequiresUserAction: false,\n allowsInlineMediaPlayback: true,\n allowsFileAccess: true,\n domStorageEnabled: true,\n mediaCapturePermissionGrantType: 'grant',\n },\n initialized,\n startSession: () => post(\"START_SESSION\"),\n endSession: () => post(\"END_SESSION\"),\n sendMessage: (text: string) => post(\"SEND_MESSAGE\", { text }),\n toggleMicrophone: () => post(\"TOGGLE_MIC\"),\n toggleMute: () => post(\"TOGGLE_MUTE\"),\n keepSession: () => post(\"KEEP_SESSION\"),\n };\n}\n\nexport function useConversation(\n options: ConversationOptions,\n events?: ConversationEvents,\n bridge?: BridgeOptions\n): UseConversationResult {\n return useBridge(options, events, bridge);\n}\n\nexport type UnithConversationViewProps = ConversationEvents & {\n options: ConversationOptions;\n style?: ViewStyle;\n webviewProps?: Record<string, any>;\n webClientUrl?: string;\n};\n\nexport function UnithConversationView({\n options,\n style,\n webviewProps,\n webClientUrl,\n ...events\n}: UnithConversationViewProps) {\n const convo = useConversation(options, events, { webClientUrl });\n\n return (\n <WebView\n ref={convo.webViewRef}\n {...convo.webViewProps}\n {...webviewProps}\n style={style}\n />\n 
);\n}"],"names":["useConversation","options","events","bridge","webViewRef","useRef","_useState","useState","initialized","setInitialized","html","useMemo","webClientUrl","post","useCallback","type","payload","_webViewRef$current","current","postMessage","JSON","stringify","init","console","log","_extends","handleLoadEnd","onMessage","event","data","parse","nativeEvent","_","onStatusChange","onConnect","onDisconnect","onSuggestions","onSpeakingStart","onSpeakingEnd","onTimeoutWarning","onTimeout","onKeepSession","onMuteStatusChange","onMicrophoneStatusChange","onMicrophoneSpeechRecognitionResult","onMicrophoneError","onError","webViewProps","source","onLoadEnd","javaScriptEnabled","mediaPlaybackRequiresUserAction","allowsInlineMediaPlayback","allowsFileAccess","domStorageEnabled","mediaCapturePermissionGrantType","startSession","endSession","sendMessage","text","toggleMicrophone","toggleMute","keepSession","useBridge","_ref","style","webviewProps","convo","_objectWithoutPropertiesLoose","_excluded","React","WebView","ref"],"mappings":"ssBAiUgB,SAAAA,EACdC,EACAC,EACAC,GAEA,OA3HF,SACEF,EACAC,EACAC,QADAD,IAAAA,IAAAA,EAA6B,CAAE,YAC/BC,IAAAA,EAAwB,IAExB,IAAMC,EAAaC,EAAAA,OAAY,MAC/BC,EAAsCC,YAAS,GAAxCC,EAAWF,EAAEG,GAAAA,EAAcH,EAAA,GAE5BI,EAAOC,EAAOA,QAClB,WAAM,89BAAUR,EAAOS,cA3JzB,8DAiC+D,wwHA0HC,EAC9D,CAACT,EAAOS,eAGJC,EAAOC,EAAWA,YAAC,SAACC,EAAcC,GAAiBC,IAAAA,EAErC,OAAlBA,EAAAb,EAAWc,UAAXD,EAAoBE,YAAYC,KAAKC,UADN,CAAEN,KAAAA,EAAMC,QAAAA,IAEzC,EAAG,IAEGM,EAAOR,EAAWA,YAAC,WACnBN,IACJe,QAAQC,IAAI,iDAAkDvB,GAC9DY,EAAK,OAAMY,KACNxB,IAELQ,GAAe,GACjB,EAAG,CAACD,EAAaP,EAASY,IAEpBa,EAAgBZ,EAAWA,YAAC,WAChCS,QAAQC,IAAI,0CACZF,GACF,EAAG,CAACA,IAEEK,EAAYb,EAAAA,YAChB,SAACc,GACC,IAAIC,EAA6B,KACjC,IACEA,EAAOT,KAAKU,MAAMF,EAAMG,YAAYF,KACtC,CAAE,MAAOG,GACP,MACF,CACA,GAAKH,EAEL,OAAQA,EAAKd,MACX,IAAK,sBACHb,EAAO+B,gBAAP/B,EAAO+B,eAAiBJ,EAAKb,SAC7B,MACF,IAAK,UACHd,MAAAA,EAAOgC,WAAPhC,EAAOgC,UAAYL,EAAKb,SACxB,MACF,IAAK,mBACHd,EAAOiC,cAAPjC,EAAOiC,aAAeN,EAAKb,SAC3B,MACF,IAAK,UACa,MAAhBd,EAAOyB,WAAPzB,EAAOyB,UAAYE,EAAKb,SACxB,MACF,IAAK,oBACHd,EAAOkC,eAAPlC,EAAOkC,cAAgBP,EAAKb,SAC5B,MACF,IAAK,iBACmB,MAAtBd,EAAOmC,iBAAPnC,EAAOmC,kBACP,MACF,IAAK,eACHnC,MAAAA,EAAOoC,eAAPpC,EAAOoC,gBACP,MACF,IAAK,kBACoB,MAAvBpC,EAAOqC,kBAAPrC,EAAOqC,mBACP,MACF,IAAK,UACHrC,MAAAA,EAAOsC,WAAPtC,EAAOsC,YACP,MACF,IAAK,qBACHtC,EAAOuC,eAAPvC,EAAOuC,cAAgBZ,EAAKb,SAC5B,MACF,IAAK,cACsB,MAAzBd,EAAOwC,oBAAPxC,EAAOwC,mBAAqBb,EAAKb,SACjC,MACF,IAAK,aACHd,MAAAA,EAAOyC,0BAAPzC,EAAOyC,yBAA2Bd,EAAKb,SACvC,MACF,IAAK,uBACHd,EAAO0C,qCAAP1C,EAAO0C,oCAAsCf,EAAKb,SAClD,MACF,IAAK,YACHd,MAAAA,EAAO2C,mBAAP3C,EAAO2C,kBAAoBhB,EAAKb,SAChC,MACF,IAAK,cACHd,EAAO4C,SAAP5C,EAAO4C,QAAUjB,EAAKb,SAK5B,EACA,CAACd,IAGH,MAAO,CACLE,WAAAA,EACA2C,aAAc,CACZC,OAAQ,CAAEtC,KAAAA,GACViB,UAAAA,EACAsB,UAAWvB,EACXwB,mBAAmB,EACnBC,iCAAiC,EACjCC,2BAA2B,EAC3BC,kBAAkB,EAClBC,mBAAmB,EACnBC,gCAAiC,SAEnC/C,YAAAA,EACAgD,aAAc,WAAM,OAAA3C,EAAK,gBAAgB,EACzC4C,WAAY,WAAF,OAAQ5C,EAAK,cAAc,EACrC6C,YAAa,SAACC,GAAiB,OAAA9C,EAAK,eAAgB,CAAE8C,KAAAA,GAAO,EAC7DC,iBAAkB,WAAM,OAAA/C,EAAK,aAAa,EAC1CgD,WAAY,WAAF,OAAQhD,EAAK,cAAc,EACrCiD,YAAa,kBAAMjD,EAAK,eAAe,EAE3C,CAOSkD,CAAU9D,EAASC,EAAQC,EACpC,yBASgB,SAAqB6D,GAMR,IAJ3BC,EAAKD,EAALC,MACAC,EAAYF,EAAZE,aACAtD,EAAYoD,EAAZpD,aAGMuD,EAAQnE,EANPgE,EAAP/D,mJAISmE,CAAAJ,EAAAK,GAEsC,CAAEzD,aAAAA,iBAEjD,OACE0D,wBAACC,UAAO9C,GACN+C,IAAKL,EAAM/D,YACP+D,EAAMpB,aACNmB,GACJD,MAAOA,IAGb"}
|
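The change to this map tracks the source-level edits to src/index.tsx that are embedded verbatim in sourcesContent: 0.0.6 adds a console.log trace at the top of applyMessage inside the injected WebView HTML, drops the "Initializing conversation with payload" log that 0.0.4 emitted before Conversation.startDigitalHuman, and extracts the inline onLoadEnd handler into a memoized callback. A minimal sketch of that last refactor, reduced to the lines that differ (the rest of the useBridge hook body is unchanged and elided):

// 0.0.4: a fresh onLoadEnd closure was created on every render
webViewProps: {
  onLoadEnd: () => {
    console.log("WebView loaded, initializing bridge...");
    init();
  },
  // ...other WebView props unchanged
},

// 0.0.6: the handler is memoized, so webViewProps.onLoadEnd keeps a
// stable identity until init itself changes
const handleLoadEnd = useCallback(() => {
  console.log("WebView loaded, initializing bridge...");
  init();
}, [init]);

webViewProps: {
  onLoadEnd: handleLoadEnd,
  // ...other WebView props unchanged
},

Since init is itself a useCallback depending on [initialized, options, post], the practical effect is fewer handler re-creations passed to the WebView; load-end behavior is otherwise identical between the two versions.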