@unith-ai/react-native 0.0.6 → 0.0.7

package/dist/index.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.tsx"],"names":[],"mappings":"AAAA,OAAO,KAAiD,MAAM,OAAO,CAAC;AACtE,OAAO,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAGzC,MAAM,MAAM,mBAAmB,GAAG;IAChC,KAAK,EAAE,MAAM,CAAC;IACd,MAAM,EAAE,MAAM,CAAC;IACf,MAAM,EAAE,MAAM,CAAC;IACf,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,aAAa,CAAC,EAAE,OAAO,CAAC;IACxB,mBAAmB,CAAC,EAAE,MAAM,CAAC;IAC7B,kBAAkB,CAAC,EAAE,OAAO,GAAG,aAAa,GAAG,QAAQ,CAAC;CACzD,CAAC;AAEF,MAAM,MAAM,kBAAkB,GAAG;IAC/B,cAAc,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,MAAM,EAAE,MAAM,CAAA;KAAE,KAAK,IAAI,CAAC;IACpD,SAAS,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,MAAM,EAAE,MAAM,CAAC;QAAC,QAAQ,EAAE,GAAG,CAAC;QAAC,gBAAgB,EAAE,OAAO,CAAA;KAAE,KAAK,IAAI,CAAC;IACzF,YAAY,CAAC,EAAE,CAAC,IAAI,EAAE,GAAG,KAAK,IAAI,CAAC;IACnC,SAAS,CAAC,EAAE,CAAC,IAAI,EAAE,GAAG,KAAK,IAAI,CAAC;IAChC,aAAa,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,WAAW,EAAE,MAAM,EAAE,CAAA;KAAE,KAAK,IAAI,CAAC;IAC1D,gBAAgB,CAAC,EAAE,MAAM,IAAI,CAAC;IAC9B,SAAS,CAAC,EAAE,MAAM,IAAI,CAAC;IACvB,kBAAkB,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,OAAO,EAAE,OAAO,CAAA;KAAE,KAAK,IAAI,CAAC;IAC1D,eAAe,CAAC,EAAE,MAAM,IAAI,CAAC;IAC7B,aAAa,CAAC,EAAE,MAAM,IAAI,CAAC;IAC3B,aAAa,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,OAAO,EAAE,OAAO,CAAA;KAAE,KAAK,IAAI,CAAC;IACrD,OAAO,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,OAAO,EAAE,MAAM,CAAC;QAAC,eAAe,EAAE,OAAO,CAAC;QAAC,IAAI,EAAE,MAAM,CAAA;KAAE,KAAK,IAAI,CAAC;IACtF,iBAAiB,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,OAAO,EAAE,MAAM,CAAA;KAAE,KAAK,IAAI,CAAC;IACxD,wBAAwB,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,MAAM,EAAE,IAAI,GAAG,KAAK,GAAG,YAAY,CAAA;KAAE,KAAK,IAAI,CAAC;IACnF,mCAAmC,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,UAAU,EAAE,MAAM,CAAA;KAAE,KAAK,IAAI,CAAC;CAC9E,CAAC;AAEF,MAAM,MAAM,aAAa,GAAG;IAC1B,YAAY,CAAC,EAAE,MAAM,CAAC;CACvB,CAAC;AAEF,MAAM,MAAM,qBAAqB,GAAG;IAClC,UAAU,EAAE,KAAK,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC;IACjC,YAAY,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;IAClC,WAAW,EAAE,OAAO,CAAC;IACrB,YAAY,EAAE,MAAM,IAAI,CAAC;IACzB,UAAU,EAAE,MAAM,IAAI,CAAC;IACvB,WAAW,EAAE,CAAC,IAAI,EAAE,MAAM,KAAK,IAAI,CAAC;IACpC,gBAAgB,EAAE,MAAM,IAAI,CAAC;IAC7B,UAAU,EAAE,MAAM,IAAI,CAAC;IACvB,WAAW,EAAE,MAAM,IAAI,CAAC;CACzB,CAAC;AAgRF,wBAAgB,eAAe,CAC7B,OAAO,EAAE,mBAAmB,EAC5B,MAAM,CAAC,EAAE,kBAAkB,EAC3B,MAAM,CAAC,EAAE,aAAa,GACrB,qBAAqB,CAEvB;AAED,MAAM,MAAM,0BAA0B,GAAG,kBAAkB,GAAG;IAC5D,OAAO,EAAE,mBAAmB,CAAC;IAC7B,KAAK,CAAC,EAAE,SAAS,CAAC;IAClB,YAAY,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;IACnC,YAAY,CAAC,EAAE,MAAM,CAAC;CACvB,CAAC;AAEF,wBAAgB,qBAAqB,CAAC,EACpC,OAAO,EACP,KAAK,EACL,YAAY,EACZ,YAAY,EACZ,GAAG,MAAM,EACV,EAAE,0BAA0B,qBAW5B"}
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.tsx"],"names":[],"mappings":"AAAA,OAAO,KAAiD,MAAM,OAAO,CAAC;AACtE,OAAO,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAGzC,MAAM,MAAM,mBAAmB,GAAG;IAChC,KAAK,EAAE,MAAM,CAAC;IACd,MAAM,EAAE,MAAM,CAAC;IACf,MAAM,EAAE,MAAM,CAAC;IACf,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,aAAa,CAAC,EAAE,OAAO,CAAC;IACxB,mBAAmB,CAAC,EAAE,MAAM,CAAC;IAC7B,kBAAkB,CAAC,EAAE,OAAO,GAAG,aAAa,GAAG,QAAQ,CAAC;CACzD,CAAC;AAEF,MAAM,MAAM,kBAAkB,GAAG;IAC/B,cAAc,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,MAAM,EAAE,MAAM,CAAA;KAAE,KAAK,IAAI,CAAC;IACpD,SAAS,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,MAAM,EAAE,MAAM,CAAC;QAAC,QAAQ,EAAE,GAAG,CAAC;QAAC,gBAAgB,EAAE,OAAO,CAAA;KAAE,KAAK,IAAI,CAAC;IACzF,YAAY,CAAC,EAAE,CAAC,IAAI,EAAE,GAAG,KAAK,IAAI,CAAC;IACnC,SAAS,CAAC,EAAE,CAAC,IAAI,EAAE,GAAG,KAAK,IAAI,CAAC;IAChC,aAAa,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,WAAW,EAAE,MAAM,EAAE,CAAA;KAAE,KAAK,IAAI,CAAC;IAC1D,gBAAgB,CAAC,EAAE,MAAM,IAAI,CAAC;IAC9B,SAAS,CAAC,EAAE,MAAM,IAAI,CAAC;IACvB,kBAAkB,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,OAAO,EAAE,OAAO,CAAA;KAAE,KAAK,IAAI,CAAC;IAC1D,eAAe,CAAC,EAAE,MAAM,IAAI,CAAC;IAC7B,aAAa,CAAC,EAAE,MAAM,IAAI,CAAC;IAC3B,aAAa,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,OAAO,EAAE,OAAO,CAAA;KAAE,KAAK,IAAI,CAAC;IACrD,OAAO,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,OAAO,EAAE,MAAM,CAAC;QAAC,eAAe,EAAE,OAAO,CAAC;QAAC,IAAI,EAAE,MAAM,CAAA;KAAE,KAAK,IAAI,CAAC;IACtF,iBAAiB,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,OAAO,EAAE,MAAM,CAAA;KAAE,KAAK,IAAI,CAAC;IACxD,wBAAwB,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,MAAM,EAAE,IAAI,GAAG,KAAK,GAAG,YAAY,CAAA;KAAE,KAAK,IAAI,CAAC;IACnF,mCAAmC,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,UAAU,EAAE,MAAM,CAAA;KAAE,KAAK,IAAI,CAAC;CAC9E,CAAC;AAEF,MAAM,MAAM,aAAa,GAAG;IAC1B,YAAY,CAAC,EAAE,MAAM,CAAC;CACvB,CAAC;AAEF,MAAM,MAAM,qBAAqB,GAAG;IAClC,UAAU,EAAE,KAAK,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC;IACjC,YAAY,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;IAClC,WAAW,EAAE,OAAO,CAAC;IACrB,YAAY,EAAE,MAAM,IAAI,CAAC;IACzB,UAAU,EAAE,MAAM,IAAI,CAAC;IACvB,WAAW,EAAE,CAAC,IAAI,EAAE,MAAM,KAAK,IAAI,CAAC;IACpC,gBAAgB,EAAE,MAAM,IAAI,CAAC;IAC7B,UAAU,EAAE,MAAM,IAAI,CAAC;IACvB,WAAW,EAAE,MAAM,IAAI,CAAC;CACzB,CAAC;AAsRF,wBAAgB,eAAe,CAC7B,OAAO,EAAE,mBAAmB,EAC5B,MAAM,CAAC,EAAE,kBAAkB,EAC3B,MAAM,CAAC,EAAE,aAAa,GACrB,qBAAqB,CAEvB;AAED,MAAM,MAAM,0BAA0B,GAAG,kBAAkB,GAAG;IAC5D,OAAO,EAAE,mBAAmB,CAAC;IAC7B,KAAK,CAAC,EAAE,SAAS,CAAC;IAClB,YAAY,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;IACnC,YAAY,CAAC,EAAE,MAAM,CAAC;CACvB,CAAC;AAEF,wBAAgB,qBAAqB,CAAC,EACpC,OAAO,EACP,KAAK,EACL,YAAY,EACZ,YAAY,EACZ,GAAG,MAAM,EACV,EAAE,0BAA0B,qBAW5B"}
package/dist/lib.js CHANGED
@@ -1,2 +1,2 @@
- var n=require("react"),e=require("react-native-webview");function a(n){return n&&"object"==typeof n&&"default"in n?n:{default:n}}var t=/*#__PURE__*/a(n);function o(){return o=Object.assign?Object.assign.bind():function(n){for(var e=1;e<arguments.length;e++){var a=arguments[e];for(var t in a)({}).hasOwnProperty.call(a,t)&&(n[t]=a[t])}return n},o.apply(null,arguments)}var i=["options","style","webviewProps","webClientUrl"];function s(e,a,t){return function(e,a,t){void 0===a&&(a={}),void 0===t&&(t={});var i=n.useRef(null),s=n.useState(!1),r=s[0],l=s[1],c=n.useMemo(function(){return'<!doctype html>\n<html>\n <head>\n <meta charset="utf-8" />\n <meta name="viewport" content="width=device-width, initial-scale=1" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id="root"></div>\n <script type="module">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => {\n console.log("Applying message from React Native:", msg);\n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === "INIT") {\n try {\n const { Conversation } = await import("'+(t.webClientUrl||"https://unpkg.com/@unith-ai/core-client/dist/lib.module.js")+'");\n\n if (!("VideoDecoder" in window)) {\n send("ERROR", {\n message: "WebCodecs VideoDecoder is not supported on this device.",\n type: "modal",\n endConversation: true,\n });\n return;\n }\n\n const element = document.getElementById("root");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send("STATUS_CHANGE", data),\n onConnect: (data) => send("CONNECT", data),\n onDisconnect: (data) => send("DISCONNECT", data),\n onMessage: (data) => send("MESSAGE", data),\n onSuggestions: (data) => send("SUGGESTIONS", data),\n onSpeakingStart: () => send("SPEAKING_START"),\n onSpeakingEnd: () => send("SPEAKING_END"),\n onTimeoutWarning: () => send("TIMEOUT_WARNING"),\n onTimeout: () => send("TIMEOUT"),\n onKeepSession: (data) => send("KEEP_SESSION", data),\n onMuteStatusChange: (data) => send("MUTE_STATUS", data),\n onError: (data) => send("ERROR", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send("MIC_ERROR", data),\n onMicrophoneStatusChange: (data) => send("MIC_STATUS", data),\n onMicrophoneSpeechRecognitionResult: (data) => send("MIC_TRANSCRIPT", data),\n },\n });\n\n ready = true;\n send("READY");\n\n while (queue.length > 0) {\n const next = queue.shift();\n try {\n await applyMessage(next);\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Failed to process queued message",\n type: "notification",\n endConversation: false,\n });\n }\n }\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Failed to initialize conversation",\n type: "modal",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n try {\n switch (type) {\n case "START_SESSION":\n await conversation.startSession();\n break;\n case "END_SESSION":\n await conversation.endSession();\n break;\n case "SEND_MESSAGE":\n await conversation.sendMessage(payload?.text || "");\n break;\n case "TOGGLE_MIC":\n await conversation.toggleMicrophone();\n break;\n case "TOGGLE_MUTE":\n await 
conversation.toggleMute();\n break;\n case "KEEP_SESSION":\n await conversation.keepSession();\n break;\n default:\n break;\n }\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Operation failed",\n type: "notification",\n endConversation: false,\n });\n }\n };\n\n const handleIncoming = (event) => {\n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== "INIT") {\n queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (error) {\n console.error("Failed to handle incoming message:", error);\n }\n };\n\n window.addEventListener("message", handleIncoming);\n document.addEventListener("message", handleIncoming);\n <\/script>\n </body>\n</html>'},[t.webClientUrl]),d=n.useCallback(function(n,e){var a;null==(a=i.current)||a.postMessage(JSON.stringify({type:n,payload:e}))},[]),u=n.useCallback(function(){r||(console.log("Initializing conversation bridge with options:",e),d("INIT",o({},e)),l(!0))},[r,e,d]),S=n.useCallback(function(){console.log("WebView loaded, initializing bridge..."),u()},[u]),p=n.useCallback(function(n){var e=null;try{e=JSON.parse(n.nativeEvent.data)}catch(n){return}if(e)switch(e.type){case"STATUS_CHANGE":null==a.onStatusChange||a.onStatusChange(e.payload);break;case"CONNECT":null==a.onConnect||a.onConnect(e.payload);break;case"DISCONNECT":null==a.onDisconnect||a.onDisconnect(e.payload);break;case"MESSAGE":null==a.onMessage||a.onMessage(e.payload);break;case"SUGGESTIONS":null==a.onSuggestions||a.onSuggestions(e.payload);break;case"SPEAKING_START":null==a.onSpeakingStart||a.onSpeakingStart();break;case"SPEAKING_END":null==a.onSpeakingEnd||a.onSpeakingEnd();break;case"TIMEOUT_WARNING":null==a.onTimeoutWarning||a.onTimeoutWarning();break;case"TIMEOUT":null==a.onTimeout||a.onTimeout();break;case"KEEP_SESSION":null==a.onKeepSession||a.onKeepSession(e.payload);break;case"MUTE_STATUS":null==a.onMuteStatusChange||a.onMuteStatusChange(e.payload);break;case"MIC_STATUS":null==a.onMicrophoneStatusChange||a.onMicrophoneStatusChange(e.payload);break;case"MIC_TRANSCRIPT":null==a.onMicrophoneSpeechRecognitionResult||a.onMicrophoneSpeechRecognitionResult(e.payload);break;case"MIC_ERROR":null==a.onMicrophoneError||a.onMicrophoneError(e.payload);break;case"ERROR":null==a.onError||a.onError(e.payload)}},[a]);return{webViewRef:i,webViewProps:{source:{html:c},onMessage:p,onLoadEnd:S,javaScriptEnabled:!0,mediaPlaybackRequiresUserAction:!1,allowsInlineMediaPlayback:!0,allowsFileAccess:!0,domStorageEnabled:!0,mediaCapturePermissionGrantType:"grant"},initialized:r,startSession:function(){return d("START_SESSION")},endSession:function(){return d("END_SESSION")},sendMessage:function(n){return d("SEND_MESSAGE",{text:n})},toggleMicrophone:function(){return d("TOGGLE_MIC")},toggleMute:function(){return d("TOGGLE_MUTE")},keepSession:function(){return d("KEEP_SESSION")}}}(e,a,t)}exports.UnithConversationView=function(n){var a=n.style,r=n.webviewProps,l=n.webClientUrl,c=s(n.options,function(n,e){if(null==n)return{};var a={};for(var t in n)if({}.hasOwnProperty.call(n,t)){if(-1!==e.indexOf(t))continue;a[t]=n[t]}return a}(n,i),{webClientUrl:l});/*#__PURE__*/return t.default.createElement(e.WebView,o({ref:c.webViewRef},c.webViewProps,r,{style:a}))},exports.useConversation=s;
+ var n=require("react"),e=require("react-native-webview");function a(n){return n&&"object"==typeof n&&"default"in n?n:{default:n}}var t=/*#__PURE__*/a(n);function o(){return o=Object.assign?Object.assign.bind():function(n){for(var e=1;e<arguments.length;e++){var a=arguments[e];for(var t in a)({}).hasOwnProperty.call(a,t)&&(n[t]=a[t])}return n},o.apply(null,arguments)}var i=["options","style","webviewProps","webClientUrl"];function s(e,a,t){return function(e,a,t){void 0===a&&(a={}),void 0===t&&(t={});var i=n.useRef(null),s=n.useRef(!1),r=n.useState(!1),l=r[0],c=r[1],d=n.useMemo(function(){return'<!doctype html>\n<html>\n <head>\n <meta charset="utf-8" />\n <meta name="viewport" content="width=device-width, initial-scale=1" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id="root"></div>\n <script type="module">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => {\n console.log("Applying message:", msg);\n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === "INIT") {\n try {\n const { Conversation } = await import("'+(t.webClientUrl||"https://unpkg.com/@unith-ai/core-client/dist/lib.module.js")+'");\n\n if (!("VideoDecoder" in window)) {\n send("ERROR", {\n message: "WebCodecs VideoDecoder is not supported on this device.",\n type: "modal",\n endConversation: true,\n });\n return;\n }\n\n const element = document.getElementById("root");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send("STATUS_CHANGE", data),\n onConnect: (data) => send("CONNECT", data),\n onDisconnect: (data) => send("DISCONNECT", data),\n onMessage: (data) => send("MESSAGE", data),\n onSuggestions: (data) => send("SUGGESTIONS", data),\n onSpeakingStart: () => send("SPEAKING_START"),\n onSpeakingEnd: () => send("SPEAKING_END"),\n onTimeoutWarning: () => send("TIMEOUT_WARNING"),\n onTimeout: () => send("TIMEOUT"),\n onKeepSession: (data) => send("KEEP_SESSION", data),\n onMuteStatusChange: (data) => send("MUTE_STATUS", data),\n onError: (data) => send("ERROR", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send("MIC_ERROR", data),\n onMicrophoneStatusChange: (data) => send("MIC_STATUS", data),\n onMicrophoneSpeechRecognitionResult: (data) => send("MIC_TRANSCRIPT", data),\n },\n });\n\n ready = true;\n send("READY");\n\n while (queue.length > 0) {\n const next = queue.shift();\n try {\n await applyMessage(next);\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Failed to process queued message",\n type: "notification",\n endConversation: false,\n });\n }\n }\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Failed to initialize conversation",\n type: "modal",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n try {\n switch (type) {\n case "START_SESSION":\n await conversation.startSession();\n break;\n case "END_SESSION":\n await conversation.endSession();\n break;\n case "SEND_MESSAGE":\n await conversation.sendMessage(payload?.text || "");\n break;\n case "TOGGLE_MIC":\n await conversation.toggleMicrophone();\n break;\n case "TOGGLE_MUTE":\n await 
conversation.toggleMute();\n break;\n case "KEEP_SESSION":\n await conversation.keepSession();\n break;\n default:\n break;\n }\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Operation failed",\n type: "notification",\n endConversation: false,\n });\n }\n };\n\n const handleIncoming = (event) => {\n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== "INIT") {\n queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (error) {\n console.error("Failed to handle incoming message:", error);\n }\n };\n\n window.addEventListener("message", handleIncoming);\n document.addEventListener("message", handleIncoming);\n <\/script>\n </body>\n</html>'},[t.webClientUrl]),u=n.useCallback(function(n,e){var a,t={type:n,payload:e};console.log("Posting message to WebView:",n,e),null==(a=i.current)||a.postMessage(JSON.stringify(t))},[]),S=n.useCallback(function(){s.current?console.log("Already initialized, skipping..."):(console.log("WebView loaded, initializing bridge..."),console.log("Initializing conversation bridge with options:",e),s.current=!0,c(!0),setTimeout(function(){u("INIT",o({},e))},100))},[e,u]),p=n.useCallback(function(n){var e=null;try{e=JSON.parse(n.nativeEvent.data)}catch(n){return}if(e)switch(e.type){case"STATUS_CHANGE":null==a.onStatusChange||a.onStatusChange(e.payload);break;case"CONNECT":null==a.onConnect||a.onConnect(e.payload);break;case"DISCONNECT":null==a.onDisconnect||a.onDisconnect(e.payload);break;case"MESSAGE":null==a.onMessage||a.onMessage(e.payload);break;case"SUGGESTIONS":null==a.onSuggestions||a.onSuggestions(e.payload);break;case"SPEAKING_START":null==a.onSpeakingStart||a.onSpeakingStart();break;case"SPEAKING_END":null==a.onSpeakingEnd||a.onSpeakingEnd();break;case"TIMEOUT_WARNING":null==a.onTimeoutWarning||a.onTimeoutWarning();break;case"TIMEOUT":null==a.onTimeout||a.onTimeout();break;case"KEEP_SESSION":null==a.onKeepSession||a.onKeepSession(e.payload);break;case"MUTE_STATUS":null==a.onMuteStatusChange||a.onMuteStatusChange(e.payload);break;case"MIC_STATUS":null==a.onMicrophoneStatusChange||a.onMicrophoneStatusChange(e.payload);break;case"MIC_TRANSCRIPT":null==a.onMicrophoneSpeechRecognitionResult||a.onMicrophoneSpeechRecognitionResult(e.payload);break;case"MIC_ERROR":null==a.onMicrophoneError||a.onMicrophoneError(e.payload);break;case"ERROR":null==a.onError||a.onError(e.payload)}},[a]);return{webViewRef:i,webViewProps:{source:{html:d},onMessage:p,onLoadEnd:S,javaScriptEnabled:!0,mediaPlaybackRequiresUserAction:!1,allowsInlineMediaPlayback:!0,allowsFileAccess:!0,domStorageEnabled:!0,mediaCapturePermissionGrantType:"grant"},initialized:l,startSession:function(){return u("START_SESSION")},endSession:function(){return u("END_SESSION")},sendMessage:function(n){return u("SEND_MESSAGE",{text:n})},toggleMicrophone:function(){return u("TOGGLE_MIC")},toggleMute:function(){return u("TOGGLE_MUTE")},keepSession:function(){return u("KEEP_SESSION")}}}(e,a,t)}exports.UnithConversationView=function(n){var a=n.style,r=n.webviewProps,l=n.webClientUrl,c=s(n.options,function(n,e){if(null==n)return{};var a={};for(var t in n)if({}.hasOwnProperty.call(n,t)){if(-1!==e.indexOf(t))continue;a[t]=n[t]}return a}(n,i),{webClientUrl:l});/*#__PURE__*/return t.default.createElement(e.WebView,o({ref:c.webViewRef},c.webViewProps,r,{style:a}))},exports.useConversation=s;
  //# sourceMappingURL=lib.js.map
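
The minified delta above is hard to read, but the updated sourcemap that follows embeds the full sources, so the substantive 0.0.7 change can be recovered from it: initialization now sits behind a ref guard instead of a state check, every outgoing bridge message is logged, and the INIT message is posted 100 ms after onLoadEnd rather than synchronously. A condensed sketch of that logic (the real code lives inside the package's internal useBridge hook; the standalone useBridgeInit wrapper here exists only for illustration):

```ts
import { useCallback, useRef, useState, type RefObject } from "react";

type BridgeMessage = { type: string; payload?: any };

// Condensed from the 0.0.7 source embedded in lib.js.map below.
// postMessage is react-native-webview's bridge method on the WebView ref.
function useBridgeInit(
  options: Record<string, any>,
  webViewRef: RefObject<{ postMessage: (data: string) => void } | null>
) {
  // New in 0.0.7: a ref guard, so repeated onLoadEnd calls never re-post INIT.
  const initializedRef = useRef(false);
  const [initialized, setInitialized] = useState(false);

  const post = useCallback(
    (type: string, payload?: any) => {
      const message: BridgeMessage = { type, payload };
      console.log("Posting message to WebView:", type, payload); // new 0.0.7 log
      webViewRef.current?.postMessage(JSON.stringify(message));
    },
    [webViewRef]
  );

  const handleLoadEnd = useCallback(() => {
    if (initializedRef.current) {
      console.log("Already initialized, skipping...");
      return;
    }
    console.log("WebView loaded, initializing bridge...");
    initializedRef.current = true;
    setInitialized(true);
    // New in 0.0.7: small delay so the WebView's message listener is
    // attached before INIT arrives (0.0.6 posted it synchronously).
    setTimeout(() => {
      post("INIT", { ...options });
    }, 100);
  }, [options, post]);

  return { initialized, post, handleLoadEnd };
}
```

The ref guard presumably matters because 0.0.6 gated only on React state (`if (initialized) return`), and state updates are asynchronous, so back-to-back onLoadEnd events could both observe `initialized === false` and post INIT twice.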
package/dist/lib.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"lib.js","sources":["../src/index.tsx"],"sourcesContent":["import React, { useCallback, useMemo, useRef, useState } from \"react\";\nimport { ViewStyle } from \"react-native\";\nimport { WebView, WebViewMessageEvent } from \"react-native-webview\";\n\nexport type ConversationOptions = {\n orgId: string;\n headId: string;\n apiKey: string;\n environment?: string;\n mode?: string;\n language?: string;\n username?: string;\n allowWakeLock?: boolean;\n fadeTransitionsType?: string;\n microphoneProvider?: \"azure\" | \"eleven_labs\" | \"custom\";\n};\n\nexport type ConversationEvents = {\n onStatusChange?: (prop: { status: string }) => void;\n onConnect?: (prop: { userId: string; headInfo: any; microphoneAccess: boolean }) => void;\n onDisconnect?: (prop: any) => void;\n onMessage?: (prop: any) => void;\n onSuggestions?: (prop: { suggestions: string[] }) => void;\n onTimeoutWarning?: () => void;\n onTimeout?: () => void;\n onMuteStatusChange?: (prop: { isMuted: boolean }) => void;\n onSpeakingStart?: () => void;\n onSpeakingEnd?: () => void;\n onKeepSession?: (prop: { granted: boolean }) => void;\n onError?: (prop: { message: string; endConversation: boolean; type: string }) => void;\n onMicrophoneError?: (prop: { message: string }) => void;\n onMicrophoneStatusChange?: (prop: { status: \"ON\" | \"OFF\" | \"PROCESSING\" }) => void;\n onMicrophoneSpeechRecognitionResult?: (prop: { transcript: string }) => void;\n};\n\nexport type BridgeOptions = {\n webClientUrl?: string;\n};\n\nexport type UseConversationResult = {\n webViewRef: React.RefObject<any>;\n webViewProps: Record<string, any>;\n initialized: boolean;\n startSession: () => void;\n endSession: () => void;\n sendMessage: (text: string) => void;\n toggleMicrophone: () => void;\n toggleMute: () => void;\n keepSession: () => void;\n};\n\ntype BridgeMessage = {\n type: string;\n payload?: any;\n};\n\nconst DEFAULT_WEB_CLIENT_URL =\n \"https://unpkg.com/@unith-ai/core-client/dist/lib.module.js\";\n\nfunction buildHtml(webClientUrl: string) {\n return `<!doctype html>\n<html>\n <head>\n <meta charset=\"utf-8\" />\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id=\"root\"></div>\n <script type=\"module\">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => {\n console.log(\"Applying message from React Native:\", msg);\n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === \"INIT\") {\n try {\n const { Conversation } = await import(\"${webClientUrl}\");\n\n if (!(\"VideoDecoder\" in window)) {\n send(\"ERROR\", {\n message: \"WebCodecs VideoDecoder is not supported on this device.\",\n type: \"modal\",\n endConversation: true,\n });\n return;\n }\n\n const element = document.getElementById(\"root\");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send(\"STATUS_CHANGE\", data),\n onConnect: (data) => send(\"CONNECT\", data),\n onDisconnect: (data) => send(\"DISCONNECT\", data),\n onMessage: (data) => send(\"MESSAGE\", data),\n onSuggestions: (data) => send(\"SUGGESTIONS\", data),\n 
onSpeakingStart: () => send(\"SPEAKING_START\"),\n onSpeakingEnd: () => send(\"SPEAKING_END\"),\n onTimeoutWarning: () => send(\"TIMEOUT_WARNING\"),\n onTimeout: () => send(\"TIMEOUT\"),\n onKeepSession: (data) => send(\"KEEP_SESSION\", data),\n onMuteStatusChange: (data) => send(\"MUTE_STATUS\", data),\n onError: (data) => send(\"ERROR\", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send(\"MIC_ERROR\", data),\n onMicrophoneStatusChange: (data) => send(\"MIC_STATUS\", data),\n onMicrophoneSpeechRecognitionResult: (data) => send(\"MIC_TRANSCRIPT\", data),\n },\n });\n\n ready = true;\n send(\"READY\");\n\n while (queue.length > 0) {\n const next = queue.shift();\n try {\n await applyMessage(next);\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to process queued message\",\n type: \"notification\",\n endConversation: false,\n });\n }\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to initialize conversation\",\n type: \"modal\",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n try {\n switch (type) {\n case \"START_SESSION\":\n await conversation.startSession();\n break;\n case \"END_SESSION\":\n await conversation.endSession();\n break;\n case \"SEND_MESSAGE\":\n await conversation.sendMessage(payload?.text || \"\");\n break;\n case \"TOGGLE_MIC\":\n await conversation.toggleMicrophone();\n break;\n case \"TOGGLE_MUTE\":\n await conversation.toggleMute();\n break;\n case \"KEEP_SESSION\":\n await conversation.keepSession();\n break;\n default:\n break;\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Operation failed\",\n type: \"notification\",\n endConversation: false,\n });\n }\n };\n\n const handleIncoming = (event) => {\n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== \"INIT\") {\n queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (error) {\n console.error(\"Failed to handle incoming message:\", error);\n }\n };\n\n window.addEventListener(\"message\", handleIncoming);\n document.addEventListener(\"message\", handleIncoming);\n </script>\n </body>\n</html>`;\n}\n\nfunction useBridge(\n options: ConversationOptions,\n events: ConversationEvents = {},\n bridge: BridgeOptions = {}\n): UseConversationResult {\n const webViewRef = useRef<any>(null);\n const [initialized, setInitialized] = useState(false);\n\n const html = useMemo(\n () => buildHtml(bridge.webClientUrl || DEFAULT_WEB_CLIENT_URL),\n [bridge.webClientUrl]\n );\n\n const post = useCallback((type: string, payload?: any) => {\n const message: BridgeMessage = { type, payload };\n webViewRef.current?.postMessage(JSON.stringify(message));\n }, []);\n\n const init = useCallback(() => {\n if (initialized) return;\n console.log(\"Initializing conversation bridge with options:\", options);\n post(\"INIT\", {\n ...options,\n });\n setInitialized(true);\n }, [initialized, options, post]);\n\n const handleLoadEnd = useCallback(() => {\n console.log(\"WebView loaded, initializing bridge...\");\n init();\n }, [init]);\n\n const onMessage = useCallback(\n (event: WebViewMessageEvent) => {\n let data: BridgeMessage | null = null;\n try {\n data = JSON.parse(event.nativeEvent.data);\n } catch (_) {\n return;\n }\n if (!data) return;\n\n switch (data.type) {\n case \"STATUS_CHANGE\":\n events.onStatusChange?.(data.payload);\n break;\n case \"CONNECT\":\n events.onConnect?.(data.payload);\n break;\n case \"DISCONNECT\":\n events.onDisconnect?.(data.payload);\n 
break;\n case \"MESSAGE\":\n events.onMessage?.(data.payload);\n break;\n case \"SUGGESTIONS\":\n events.onSuggestions?.(data.payload);\n break;\n case \"SPEAKING_START\":\n events.onSpeakingStart?.();\n break;\n case \"SPEAKING_END\":\n events.onSpeakingEnd?.();\n break;\n case \"TIMEOUT_WARNING\":\n events.onTimeoutWarning?.();\n break;\n case \"TIMEOUT\":\n events.onTimeout?.();\n break;\n case \"KEEP_SESSION\":\n events.onKeepSession?.(data.payload);\n break;\n case \"MUTE_STATUS\":\n events.onMuteStatusChange?.(data.payload);\n break;\n case \"MIC_STATUS\":\n events.onMicrophoneStatusChange?.(data.payload);\n break;\n case \"MIC_TRANSCRIPT\":\n events.onMicrophoneSpeechRecognitionResult?.(data.payload);\n break;\n case \"MIC_ERROR\":\n events.onMicrophoneError?.(data.payload);\n break;\n case \"ERROR\":\n events.onError?.(data.payload);\n break;\n default:\n break;\n }\n },\n [events]\n );\n\n return {\n webViewRef,\n webViewProps: {\n source: { html },\n onMessage,\n onLoadEnd: handleLoadEnd,\n javaScriptEnabled: true,\n mediaPlaybackRequiresUserAction: false,\n allowsInlineMediaPlayback: true,\n allowsFileAccess: true,\n domStorageEnabled: true,\n mediaCapturePermissionGrantType: 'grant',\n },\n initialized,\n startSession: () => post(\"START_SESSION\"),\n endSession: () => post(\"END_SESSION\"),\n sendMessage: (text: string) => post(\"SEND_MESSAGE\", { text }),\n toggleMicrophone: () => post(\"TOGGLE_MIC\"),\n toggleMute: () => post(\"TOGGLE_MUTE\"),\n keepSession: () => post(\"KEEP_SESSION\"),\n };\n}\n\nexport function useConversation(\n options: ConversationOptions,\n events?: ConversationEvents,\n bridge?: BridgeOptions\n): UseConversationResult {\n return useBridge(options, events, bridge);\n}\n\nexport type UnithConversationViewProps = ConversationEvents & {\n options: ConversationOptions;\n style?: ViewStyle;\n webviewProps?: Record<string, any>;\n webClientUrl?: string;\n};\n\nexport function UnithConversationView({\n options,\n style,\n webviewProps,\n webClientUrl,\n ...events\n}: UnithConversationViewProps) {\n const convo = useConversation(options, events, { webClientUrl });\n\n return (\n <WebView\n ref={convo.webViewRef}\n {...convo.webViewProps}\n {...webviewProps}\n style={style}\n />\n 
);\n}"],"names":["useConversation","options","events","bridge","webViewRef","useRef","_useState","useState","initialized","setInitialized","html","useMemo","webClientUrl","post","useCallback","type","payload","_webViewRef$current","current","postMessage","JSON","stringify","init","console","log","_extends","handleLoadEnd","onMessage","event","data","parse","nativeEvent","_","onStatusChange","onConnect","onDisconnect","onSuggestions","onSpeakingStart","onSpeakingEnd","onTimeoutWarning","onTimeout","onKeepSession","onMuteStatusChange","onMicrophoneStatusChange","onMicrophoneSpeechRecognitionResult","onMicrophoneError","onError","webViewProps","source","onLoadEnd","javaScriptEnabled","mediaPlaybackRequiresUserAction","allowsInlineMediaPlayback","allowsFileAccess","domStorageEnabled","mediaCapturePermissionGrantType","startSession","endSession","sendMessage","text","toggleMicrophone","toggleMute","keepSession","useBridge","_ref","style","webviewProps","convo","_objectWithoutPropertiesLoose","_excluded","React","WebView","ref"],"mappings":"yaAiUgB,SAAAA,EACdC,EACAC,EACAC,GAEA,OA3HF,SACEF,EACAC,EACAC,QADAD,IAAAA,IAAAA,EAA6B,CAAE,YAC/BC,IAAAA,EAAwB,IAExB,IAAMC,EAAaC,EAAAA,OAAY,MAC/BC,EAAsCC,YAAS,GAAxCC,EAAWF,EAAEG,GAAAA,EAAcH,EAAA,GAE5BI,EAAOC,EAAOA,QAClB,WAAM,89BAAUR,EAAOS,cA3JzB,8DAiC+D,wwHA0HC,EAC9D,CAACT,EAAOS,eAGJC,EAAOC,EAAWA,YAAC,SAACC,EAAcC,GAAiBC,IAAAA,EAErC,OAAlBA,EAAAb,EAAWc,UAAXD,EAAoBE,YAAYC,KAAKC,UADN,CAAEN,KAAAA,EAAMC,QAAAA,IAEzC,EAAG,IAEGM,EAAOR,EAAWA,YAAC,WACnBN,IACJe,QAAQC,IAAI,iDAAkDvB,GAC9DY,EAAK,OAAMY,KACNxB,IAELQ,GAAe,GACjB,EAAG,CAACD,EAAaP,EAASY,IAEpBa,EAAgBZ,EAAWA,YAAC,WAChCS,QAAQC,IAAI,0CACZF,GACF,EAAG,CAACA,IAEEK,EAAYb,EAAAA,YAChB,SAACc,GACC,IAAIC,EAA6B,KACjC,IACEA,EAAOT,KAAKU,MAAMF,EAAMG,YAAYF,KACtC,CAAE,MAAOG,GACP,MACF,CACA,GAAKH,EAEL,OAAQA,EAAKd,MACX,IAAK,sBACHb,EAAO+B,gBAAP/B,EAAO+B,eAAiBJ,EAAKb,SAC7B,MACF,IAAK,UACHd,MAAAA,EAAOgC,WAAPhC,EAAOgC,UAAYL,EAAKb,SACxB,MACF,IAAK,mBACHd,EAAOiC,cAAPjC,EAAOiC,aAAeN,EAAKb,SAC3B,MACF,IAAK,UACa,MAAhBd,EAAOyB,WAAPzB,EAAOyB,UAAYE,EAAKb,SACxB,MACF,IAAK,oBACHd,EAAOkC,eAAPlC,EAAOkC,cAAgBP,EAAKb,SAC5B,MACF,IAAK,iBACmB,MAAtBd,EAAOmC,iBAAPnC,EAAOmC,kBACP,MACF,IAAK,eACHnC,MAAAA,EAAOoC,eAAPpC,EAAOoC,gBACP,MACF,IAAK,kBACoB,MAAvBpC,EAAOqC,kBAAPrC,EAAOqC,mBACP,MACF,IAAK,UACHrC,MAAAA,EAAOsC,WAAPtC,EAAOsC,YACP,MACF,IAAK,qBACHtC,EAAOuC,eAAPvC,EAAOuC,cAAgBZ,EAAKb,SAC5B,MACF,IAAK,cACsB,MAAzBd,EAAOwC,oBAAPxC,EAAOwC,mBAAqBb,EAAKb,SACjC,MACF,IAAK,aACHd,MAAAA,EAAOyC,0BAAPzC,EAAOyC,yBAA2Bd,EAAKb,SACvC,MACF,IAAK,uBACHd,EAAO0C,qCAAP1C,EAAO0C,oCAAsCf,EAAKb,SAClD,MACF,IAAK,YACHd,MAAAA,EAAO2C,mBAAP3C,EAAO2C,kBAAoBhB,EAAKb,SAChC,MACF,IAAK,cACHd,EAAO4C,SAAP5C,EAAO4C,QAAUjB,EAAKb,SAK5B,EACA,CAACd,IAGH,MAAO,CACLE,WAAAA,EACA2C,aAAc,CACZC,OAAQ,CAAEtC,KAAAA,GACViB,UAAAA,EACAsB,UAAWvB,EACXwB,mBAAmB,EACnBC,iCAAiC,EACjCC,2BAA2B,EAC3BC,kBAAkB,EAClBC,mBAAmB,EACnBC,gCAAiC,SAEnC/C,YAAAA,EACAgD,aAAc,WAAM,OAAA3C,EAAK,gBAAgB,EACzC4C,WAAY,WAAF,OAAQ5C,EAAK,cAAc,EACrC6C,YAAa,SAACC,GAAiB,OAAA9C,EAAK,eAAgB,CAAE8C,KAAAA,GAAO,EAC7DC,iBAAkB,WAAM,OAAA/C,EAAK,aAAa,EAC1CgD,WAAY,WAAF,OAAQhD,EAAK,cAAc,EACrCiD,YAAa,kBAAMjD,EAAK,eAAe,EAE3C,CAOSkD,CAAU9D,EAASC,EAAQC,EACpC,+BASgB,SAAqB6D,GAMR,IAJ3BC,EAAKD,EAALC,MACAC,EAAYF,EAAZE,aACAtD,EAAYoD,EAAZpD,aAGMuD,EAAQnE,EANPgE,EAAP/D,mJAISmE,CAAAJ,EAAAK,GAEsC,CAAEzD,aAAAA,iBAEjD,OACE0D,wBAACC,UAAO9C,GACN+C,IAAKL,EAAM/D,YACP+D,EAAMpB,aACNmB,GACJD,MAAOA,IAGb"}
+ {"version":3,"file":"lib.js","sources":["../src/index.tsx"],"sourcesContent":["import React, { useCallback, useMemo, useRef, useState } from \"react\";\nimport { ViewStyle } from \"react-native\";\nimport { WebView, WebViewMessageEvent } from \"react-native-webview\";\n\nexport type ConversationOptions = {\n orgId: string;\n headId: string;\n apiKey: string;\n environment?: string;\n mode?: string;\n language?: string;\n username?: string;\n allowWakeLock?: boolean;\n fadeTransitionsType?: string;\n microphoneProvider?: \"azure\" | \"eleven_labs\" | \"custom\";\n};\n\nexport type ConversationEvents = {\n onStatusChange?: (prop: { status: string }) => void;\n onConnect?: (prop: { userId: string; headInfo: any; microphoneAccess: boolean }) => void;\n onDisconnect?: (prop: any) => void;\n onMessage?: (prop: any) => void;\n onSuggestions?: (prop: { suggestions: string[] }) => void;\n onTimeoutWarning?: () => void;\n onTimeout?: () => void;\n onMuteStatusChange?: (prop: { isMuted: boolean }) => void;\n onSpeakingStart?: () => void;\n onSpeakingEnd?: () => void;\n onKeepSession?: (prop: { granted: boolean }) => void;\n onError?: (prop: { message: string; endConversation: boolean; type: string }) => void;\n onMicrophoneError?: (prop: { message: string }) => void;\n onMicrophoneStatusChange?: (prop: { status: \"ON\" | \"OFF\" | \"PROCESSING\" }) => void;\n onMicrophoneSpeechRecognitionResult?: (prop: { transcript: string }) => void;\n};\n\nexport type BridgeOptions = {\n webClientUrl?: string;\n};\n\nexport type UseConversationResult = {\n webViewRef: React.RefObject<any>;\n webViewProps: Record<string, any>;\n initialized: boolean;\n startSession: () => void;\n endSession: () => void;\n sendMessage: (text: string) => void;\n toggleMicrophone: () => void;\n toggleMute: () => void;\n keepSession: () => void;\n};\n\ntype BridgeMessage = {\n type: string;\n payload?: any;\n};\n\nconst DEFAULT_WEB_CLIENT_URL =\n \"https://unpkg.com/@unith-ai/core-client/dist/lib.module.js\";\n\nfunction buildHtml(webClientUrl: string) {\n return `<!doctype html>\n<html>\n <head>\n <meta charset=\"utf-8\" />\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id=\"root\"></div>\n <script type=\"module\">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => {\n console.log(\"Applying message:\", msg);\n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === \"INIT\") {\n try {\n const { Conversation } = await import(\"${webClientUrl}\");\n\n if (!(\"VideoDecoder\" in window)) {\n send(\"ERROR\", {\n message: \"WebCodecs VideoDecoder is not supported on this device.\",\n type: \"modal\",\n endConversation: true,\n });\n return;\n }\n\n const element = document.getElementById(\"root\");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send(\"STATUS_CHANGE\", data),\n onConnect: (data) => send(\"CONNECT\", data),\n onDisconnect: (data) => send(\"DISCONNECT\", data),\n onMessage: (data) => send(\"MESSAGE\", data),\n onSuggestions: (data) => send(\"SUGGESTIONS\", data),\n onSpeakingStart: () => 
send(\"SPEAKING_START\"),\n onSpeakingEnd: () => send(\"SPEAKING_END\"),\n onTimeoutWarning: () => send(\"TIMEOUT_WARNING\"),\n onTimeout: () => send(\"TIMEOUT\"),\n onKeepSession: (data) => send(\"KEEP_SESSION\", data),\n onMuteStatusChange: (data) => send(\"MUTE_STATUS\", data),\n onError: (data) => send(\"ERROR\", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send(\"MIC_ERROR\", data),\n onMicrophoneStatusChange: (data) => send(\"MIC_STATUS\", data),\n onMicrophoneSpeechRecognitionResult: (data) => send(\"MIC_TRANSCRIPT\", data),\n },\n });\n\n ready = true;\n send(\"READY\");\n\n while (queue.length > 0) {\n const next = queue.shift();\n try {\n await applyMessage(next);\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to process queued message\",\n type: \"notification\",\n endConversation: false,\n });\n }\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to initialize conversation\",\n type: \"modal\",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n try {\n switch (type) {\n case \"START_SESSION\":\n await conversation.startSession();\n break;\n case \"END_SESSION\":\n await conversation.endSession();\n break;\n case \"SEND_MESSAGE\":\n await conversation.sendMessage(payload?.text || \"\");\n break;\n case \"TOGGLE_MIC\":\n await conversation.toggleMicrophone();\n break;\n case \"TOGGLE_MUTE\":\n await conversation.toggleMute();\n break;\n case \"KEEP_SESSION\":\n await conversation.keepSession();\n break;\n default:\n break;\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Operation failed\",\n type: \"notification\",\n endConversation: false,\n });\n }\n };\n\n const handleIncoming = (event) => {\n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== \"INIT\") {\n queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (error) {\n console.error(\"Failed to handle incoming message:\", error);\n }\n };\n\n window.addEventListener(\"message\", handleIncoming);\n document.addEventListener(\"message\", handleIncoming);\n </script>\n </body>\n</html>`;\n}\n\nfunction useBridge(\n options: ConversationOptions,\n events: ConversationEvents = {},\n bridge: BridgeOptions = {}\n): UseConversationResult {\n const webViewRef = useRef<any>(null);\n const initializedRef = useRef(false);\n const [initialized, setInitialized] = useState(false);\n\n const html = useMemo(\n () => buildHtml(bridge.webClientUrl || DEFAULT_WEB_CLIENT_URL),\n [bridge.webClientUrl]\n );\n\n const post = useCallback((type: string, payload?: any) => {\n const message: BridgeMessage = { type, payload };\n console.log(\"Posting message to WebView:\", type, payload);\n webViewRef.current?.postMessage(JSON.stringify(message));\n }, []);\n\n const handleLoadEnd = useCallback(() => {\n if (initializedRef.current) {\n console.log(\"Already initialized, skipping...\");\n return;\n }\n\n console.log(\"WebView loaded, initializing bridge...\");\n console.log(\"Initializing conversation bridge with options:\", options);\n\n initializedRef.current = true;\n setInitialized(true);\n\n // Add a small delay to ensure WebView is ready to receive messages\n setTimeout(() => {\n post(\"INIT\", { ...options });\n }, 100);\n }, [options, post]);\n\n const onMessage = useCallback(\n (event: WebViewMessageEvent) => {\n let data: BridgeMessage | null = null;\n try {\n data = JSON.parse(event.nativeEvent.data);\n } catch (_) {\n return;\n }\n if (!data) return;\n\n switch 
(data.type) {\n case \"STATUS_CHANGE\":\n events.onStatusChange?.(data.payload);\n break;\n case \"CONNECT\":\n events.onConnect?.(data.payload);\n break;\n case \"DISCONNECT\":\n events.onDisconnect?.(data.payload);\n break;\n case \"MESSAGE\":\n events.onMessage?.(data.payload);\n break;\n case \"SUGGESTIONS\":\n events.onSuggestions?.(data.payload);\n break;\n case \"SPEAKING_START\":\n events.onSpeakingStart?.();\n break;\n case \"SPEAKING_END\":\n events.onSpeakingEnd?.();\n break;\n case \"TIMEOUT_WARNING\":\n events.onTimeoutWarning?.();\n break;\n case \"TIMEOUT\":\n events.onTimeout?.();\n break;\n case \"KEEP_SESSION\":\n events.onKeepSession?.(data.payload);\n break;\n case \"MUTE_STATUS\":\n events.onMuteStatusChange?.(data.payload);\n break;\n case \"MIC_STATUS\":\n events.onMicrophoneStatusChange?.(data.payload);\n break;\n case \"MIC_TRANSCRIPT\":\n events.onMicrophoneSpeechRecognitionResult?.(data.payload);\n break;\n case \"MIC_ERROR\":\n events.onMicrophoneError?.(data.payload);\n break;\n case \"ERROR\":\n events.onError?.(data.payload);\n break;\n default:\n break;\n }\n },\n [events]\n );\n\n return {\n webViewRef,\n webViewProps: {\n source: { html },\n onMessage,\n onLoadEnd: handleLoadEnd,\n javaScriptEnabled: true,\n mediaPlaybackRequiresUserAction: false,\n allowsInlineMediaPlayback: true,\n allowsFileAccess: true,\n domStorageEnabled: true,\n mediaCapturePermissionGrantType: 'grant',\n },\n initialized,\n startSession: () => post(\"START_SESSION\"),\n endSession: () => post(\"END_SESSION\"),\n sendMessage: (text: string) => post(\"SEND_MESSAGE\", { text }),\n toggleMicrophone: () => post(\"TOGGLE_MIC\"),\n toggleMute: () => post(\"TOGGLE_MUTE\"),\n keepSession: () => post(\"KEEP_SESSION\"),\n };\n}\n\nexport function useConversation(\n options: ConversationOptions,\n events?: ConversationEvents,\n bridge?: BridgeOptions\n): UseConversationResult {\n return useBridge(options, events, bridge);\n}\n\nexport type UnithConversationViewProps = ConversationEvents & {\n options: ConversationOptions;\n style?: ViewStyle;\n webviewProps?: Record<string, any>;\n webClientUrl?: string;\n};\n\nexport function UnithConversationView({\n options,\n style,\n webviewProps,\n webClientUrl,\n ...events\n}: UnithConversationViewProps) {\n const convo = useConversation(options, events, { webClientUrl });\n\n return (\n <WebView\n ref={convo.webViewRef}\n {...convo.webViewProps}\n {...webviewProps}\n style={style}\n />\n 
);\n}"],"names":["useConversation","options","events","bridge","webViewRef","useRef","initializedRef","_useState","useState","initialized","setInitialized","html","useMemo","webClientUrl","post","useCallback","type","payload","_webViewRef$current","message","console","log","current","postMessage","JSON","stringify","handleLoadEnd","setTimeout","_extends","onMessage","event","data","parse","nativeEvent","_","onStatusChange","onConnect","onDisconnect","onSuggestions","onSpeakingStart","onSpeakingEnd","onTimeoutWarning","onTimeout","onKeepSession","onMuteStatusChange","onMicrophoneStatusChange","onMicrophoneSpeechRecognitionResult","onMicrophoneError","onError","webViewProps","source","onLoadEnd","javaScriptEnabled","mediaPlaybackRequiresUserAction","allowsInlineMediaPlayback","allowsFileAccess","domStorageEnabled","mediaCapturePermissionGrantType","startSession","endSession","sendMessage","text","toggleMicrophone","toggleMute","keepSession","useBridge","_ref","style","webviewProps","convo","_objectWithoutPropertiesLoose","_excluded","React","WebView","ref"],"mappings":"yaAuUgB,SAAAA,EACdC,EACAC,EACAC,GAEA,OAjIF,SACEF,EACAC,EACAC,YADAD,IAAAA,EAA6B,CAAA,YAC7BC,IAAAA,EAAwB,CAAA,GAExB,IAAMC,EAAaC,EAAAA,OAAY,MACzBC,EAAiBD,EAAAA,QAAO,GAC9BE,EAAsCC,EAAAA,UAAS,GAAxCC,EAAWF,EAAEG,GAAAA,EAAcH,EAAA,GAE5BI,EAAOC,EAAOA,QAClB,WAAA,MAzJF,s8BAyJkBT,EAAOU,cA5JzB,8DAgJF,wwHAYkE,EAC9D,CAACV,EAAOU,eAGJC,EAAOC,cAAY,SAACC,EAAcC,OAAiBC,EACjDC,EAAyB,CAAEH,KAAAA,EAAMC,QAAAA,GACvCG,QAAQC,IAAI,8BAA+BL,EAAMC,UACjDC,EAAAd,EAAWkB,UAAXJ,EAAoBK,YAAYC,KAAKC,UAAUN,GACjD,EAAG,IAEGO,EAAgBX,EAAWA,YAAC,WAC5BT,EAAegB,QACjBF,QAAQC,IAAI,qCAIdD,QAAQC,IAAI,0CACZD,QAAQC,IAAI,iDAAkDpB,GAE9DK,EAAegB,SAAU,EACzBZ,GAAe,GAGfiB,WAAW,WACTb,EAAK,OAAMc,KAAO3B,GACpB,EAAG,KACL,EAAG,CAACA,EAASa,IAEPe,EAAYd,EAAWA,YAC3B,SAACe,GACC,IAAIC,EAA6B,KACjC,IACEA,EAAOP,KAAKQ,MAAMF,EAAMG,YAAYF,KACtC,CAAE,MAAOG,GACP,MACF,CACA,GAAKH,EAEL,OAAQA,EAAKf,MACX,IAAK,sBACHd,EAAOiC,gBAAPjC,EAAOiC,eAAiBJ,EAAKd,SAC7B,MACF,IAAK,UACHf,MAAAA,EAAOkC,WAAPlC,EAAOkC,UAAYL,EAAKd,SACxB,MACF,IAAK,mBACHf,EAAOmC,cAAPnC,EAAOmC,aAAeN,EAAKd,SAC3B,MACF,IAAK,UACHf,MAAAA,EAAO2B,WAAP3B,EAAO2B,UAAYE,EAAKd,SACxB,MACF,IAAK,cACiB,MAApBf,EAAOoC,eAAPpC,EAAOoC,cAAgBP,EAAKd,SAC5B,MACF,IAAK,uBACHf,EAAOqC,iBAAPrC,EAAOqC,kBACP,MACF,IAAK,eACHrC,MAAAA,EAAOsC,eAAPtC,EAAOsC,gBACP,MACF,IAAK,kBACHtC,MAAAA,EAAOuC,kBAAPvC,EAAOuC,mBACP,MACF,IAAK,UACHvC,MAAAA,EAAOwC,WAAPxC,EAAOwC,YACP,MACF,IAAK,eACiB,MAApBxC,EAAOyC,eAAPzC,EAAOyC,cAAgBZ,EAAKd,SAC5B,MACF,IAAK,cACHf,MAAAA,EAAO0C,oBAAP1C,EAAO0C,mBAAqBb,EAAKd,SACjC,MACF,IAAK,aAC4B,MAA/Bf,EAAO2C,0BAAP3C,EAAO2C,yBAA2Bd,EAAKd,SACvC,MACF,IAAK,iBACuC,MAA1Cf,EAAO4C,qCAAP5C,EAAO4C,oCAAsCf,EAAKd,SAClD,MACF,IAAK,YACHf,MAAAA,EAAO6C,mBAAP7C,EAAO6C,kBAAoBhB,EAAKd,SAChC,MACF,IAAK,QACW,MAAdf,EAAO8C,SAAP9C,EAAO8C,QAAUjB,EAAKd,SAK5B,EACA,CAACf,IAGH,MAAO,CACLE,WAAAA,EACA6C,aAAc,CACZC,OAAQ,CAAEvC,KAAAA,GACVkB,UAAAA,EACAsB,UAAWzB,EACX0B,mBAAmB,EACnBC,iCAAiC,EACjCC,2BAA2B,EAC3BC,kBAAkB,EAClBC,mBAAmB,EACnBC,gCAAiC,SAEnChD,YAAAA,EACAiD,aAAc,WAAM,OAAA5C,EAAK,gBAAgB,EACzC6C,WAAY,WAAM,OAAA7C,EAAK,cAAc,EACrC8C,YAAa,SAACC,UAAiB/C,EAAK,eAAgB,CAAE+C,KAAAA,GAAO,EAC7DC,iBAAkB,kBAAMhD,EAAK,aAAa,EAC1CiD,WAAY,WAAF,OAAQjD,EAAK,cAAc,EACrCkD,YAAa,WAAF,OAAQlD,EAAK,eAAe,EAE3C,CAOSmD,CAAUhE,EAASC,EAAQC,EACpC,+BASgB,SAAqB+D,GACnC,IACAC,EAAKD,EAALC,MACAC,EAAYF,EAAZE,aACAvD,EAAYqD,EAAZrD,aAGMwD,EAAQrE,EANPkE,EAAPjE,mJAISqE,CAAAJ,EAAAK,GAEsC,CAAE1D,aAAAA,iBAEjD,OACE2D,EAAAA,sBAACC,EAAOA,QAAA7C,EACN8C,CAAAA,IAAKL,EAAMjE,YACPiE,EAAMpB,aACNmB,GACJD,MAAOA,IAGb"}
package/dist/lib.modern.mjs CHANGED
@@ -1,2 +1,2 @@
- import n,{useRef as e,useState as a,useMemo as t,useCallback as o}from"react";import{WebView as s}from"react-native-webview";function i(){return i=Object.assign?Object.assign.bind():function(n){for(var e=1;e<arguments.length;e++){var a=arguments[e];for(var t in a)({}).hasOwnProperty.call(a,t)&&(n[t]=a[t])}return n},i.apply(null,arguments)}const r=["options","style","webviewProps","webClientUrl"];function l(n,s,r){return function(n,s={},r={}){const l=e(null),[d,c]=a(!1),S=t(()=>`<!doctype html>\n<html>\n <head>\n <meta charset="utf-8" />\n <meta name="viewport" content="width=device-width, initial-scale=1" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id="root"></div>\n <script type="module">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => {\n console.log("Applying message from React Native:", msg);\n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === "INIT") {\n try {\n const { Conversation } = await import("${r.webClientUrl||"https://unpkg.com/@unith-ai/core-client/dist/lib.module.js"}");\n\n if (!("VideoDecoder" in window)) {\n send("ERROR", {\n message: "WebCodecs VideoDecoder is not supported on this device.",\n type: "modal",\n endConversation: true,\n });\n return;\n }\n\n const element = document.getElementById("root");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send("STATUS_CHANGE", data),\n onConnect: (data) => send("CONNECT", data),\n onDisconnect: (data) => send("DISCONNECT", data),\n onMessage: (data) => send("MESSAGE", data),\n onSuggestions: (data) => send("SUGGESTIONS", data),\n onSpeakingStart: () => send("SPEAKING_START"),\n onSpeakingEnd: () => send("SPEAKING_END"),\n onTimeoutWarning: () => send("TIMEOUT_WARNING"),\n onTimeout: () => send("TIMEOUT"),\n onKeepSession: (data) => send("KEEP_SESSION", data),\n onMuteStatusChange: (data) => send("MUTE_STATUS", data),\n onError: (data) => send("ERROR", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send("MIC_ERROR", data),\n onMicrophoneStatusChange: (data) => send("MIC_STATUS", data),\n onMicrophoneSpeechRecognitionResult: (data) => send("MIC_TRANSCRIPT", data),\n },\n });\n\n ready = true;\n send("READY");\n\n while (queue.length > 0) {\n const next = queue.shift();\n try {\n await applyMessage(next);\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Failed to process queued message",\n type: "notification",\n endConversation: false,\n });\n }\n }\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Failed to initialize conversation",\n type: "modal",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n try {\n switch (type) {\n case "START_SESSION":\n await conversation.startSession();\n break;\n case "END_SESSION":\n await conversation.endSession();\n break;\n case "SEND_MESSAGE":\n await conversation.sendMessage(payload?.text || "");\n break;\n case "TOGGLE_MIC":\n await conversation.toggleMicrophone();\n break;\n case "TOGGLE_MUTE":\n await conversation.toggleMute();\n break;\n case "KEEP_SESSION":\n await conversation.keepSession();\n break;\n default:\n break;\n 
}\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Operation failed",\n type: "notification",\n endConversation: false,\n });\n }\n };\n\n const handleIncoming = (event) => {\n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== "INIT") {\n queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (error) {\n console.error("Failed to handle incoming message:", error);\n }\n };\n\n window.addEventListener("message", handleIncoming);\n document.addEventListener("message", handleIncoming);\n <\/script>\n </body>\n</html>`,[r.webClientUrl]),p=o((n,e)=>{var a;null==(a=l.current)||a.postMessage(JSON.stringify({type:n,payload:e}))},[]),u=o(()=>{d||(console.log("Initializing conversation bridge with options:",n),p("INIT",i({},n)),c(!0))},[d,n,p]),g=o(()=>{console.log("WebView loaded, initializing bridge..."),u()},[u]),E=o(n=>{let e=null;try{e=JSON.parse(n.nativeEvent.data)}catch(n){return}if(e)switch(e.type){case"STATUS_CHANGE":null==s.onStatusChange||s.onStatusChange(e.payload);break;case"CONNECT":null==s.onConnect||s.onConnect(e.payload);break;case"DISCONNECT":null==s.onDisconnect||s.onDisconnect(e.payload);break;case"MESSAGE":null==s.onMessage||s.onMessage(e.payload);break;case"SUGGESTIONS":null==s.onSuggestions||s.onSuggestions(e.payload);break;case"SPEAKING_START":null==s.onSpeakingStart||s.onSpeakingStart();break;case"SPEAKING_END":null==s.onSpeakingEnd||s.onSpeakingEnd();break;case"TIMEOUT_WARNING":null==s.onTimeoutWarning||s.onTimeoutWarning();break;case"TIMEOUT":null==s.onTimeout||s.onTimeout();break;case"KEEP_SESSION":null==s.onKeepSession||s.onKeepSession(e.payload);break;case"MUTE_STATUS":null==s.onMuteStatusChange||s.onMuteStatusChange(e.payload);break;case"MIC_STATUS":null==s.onMicrophoneStatusChange||s.onMicrophoneStatusChange(e.payload);break;case"MIC_TRANSCRIPT":null==s.onMicrophoneSpeechRecognitionResult||s.onMicrophoneSpeechRecognitionResult(e.payload);break;case"MIC_ERROR":null==s.onMicrophoneError||s.onMicrophoneError(e.payload);break;case"ERROR":null==s.onError||s.onError(e.payload)}},[s]);return{webViewRef:l,webViewProps:{source:{html:S},onMessage:E,onLoadEnd:g,javaScriptEnabled:!0,mediaPlaybackRequiresUserAction:!1,allowsInlineMediaPlayback:!0,allowsFileAccess:!0,domStorageEnabled:!0,mediaCapturePermissionGrantType:"grant"},initialized:d,startSession:()=>p("START_SESSION"),endSession:()=>p("END_SESSION"),sendMessage:n=>p("SEND_MESSAGE",{text:n}),toggleMicrophone:()=>p("TOGGLE_MIC"),toggleMute:()=>p("TOGGLE_MUTE"),keepSession:()=>p("KEEP_SESSION")}}(n,s,r)}function d(e){let{options:a,style:t,webviewProps:o,webClientUrl:d}=e;const c=l(a,function(n,e){if(null==n)return{};var a={};for(var t in n)if({}.hasOwnProperty.call(n,t)){if(-1!==e.indexOf(t))continue;a[t]=n[t]}return a}(e,r),{webClientUrl:d});/*#__PURE__*/return n.createElement(s,i({ref:c.webViewRef},c.webViewProps,o,{style:t}))}export{d as UnithConversationView,l as useConversation};
+ import n,{useRef as e,useState as a,useMemo as t,useCallback as o}from"react";import{WebView as s}from"react-native-webview";function i(){return i=Object.assign?Object.assign.bind():function(n){for(var e=1;e<arguments.length;e++){var a=arguments[e];for(var t in a)({}).hasOwnProperty.call(a,t)&&(n[t]=a[t])}return n},i.apply(null,arguments)}const r=["options","style","webviewProps","webClientUrl"];function l(n,s,r){return function(n,s={},r={}){const l=e(null),d=e(!1),[c,p]=a(!1),S=t(()=>`<!doctype html>\n<html>\n <head>\n <meta charset="utf-8" />\n <meta name="viewport" content="width=device-width, initial-scale=1" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id="root"></div>\n <script type="module">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => {\n console.log("Applying message:", msg);\n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === "INIT") {\n try {\n const { Conversation } = await import("${r.webClientUrl||"https://unpkg.com/@unith-ai/core-client/dist/lib.module.js"}");\n\n if (!("VideoDecoder" in window)) {\n send("ERROR", {\n message: "WebCodecs VideoDecoder is not supported on this device.",\n type: "modal",\n endConversation: true,\n });\n return;\n }\n\n const element = document.getElementById("root");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send("STATUS_CHANGE", data),\n onConnect: (data) => send("CONNECT", data),\n onDisconnect: (data) => send("DISCONNECT", data),\n onMessage: (data) => send("MESSAGE", data),\n onSuggestions: (data) => send("SUGGESTIONS", data),\n onSpeakingStart: () => send("SPEAKING_START"),\n onSpeakingEnd: () => send("SPEAKING_END"),\n onTimeoutWarning: () => send("TIMEOUT_WARNING"),\n onTimeout: () => send("TIMEOUT"),\n onKeepSession: (data) => send("KEEP_SESSION", data),\n onMuteStatusChange: (data) => send("MUTE_STATUS", data),\n onError: (data) => send("ERROR", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send("MIC_ERROR", data),\n onMicrophoneStatusChange: (data) => send("MIC_STATUS", data),\n onMicrophoneSpeechRecognitionResult: (data) => send("MIC_TRANSCRIPT", data),\n },\n });\n\n ready = true;\n send("READY");\n\n while (queue.length > 0) {\n const next = queue.shift();\n try {\n await applyMessage(next);\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Failed to process queued message",\n type: "notification",\n endConversation: false,\n });\n }\n }\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Failed to initialize conversation",\n type: "modal",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n try {\n switch (type) {\n case "START_SESSION":\n await conversation.startSession();\n break;\n case "END_SESSION":\n await conversation.endSession();\n break;\n case "SEND_MESSAGE":\n await conversation.sendMessage(payload?.text || "");\n break;\n case "TOGGLE_MIC":\n await conversation.toggleMicrophone();\n break;\n case "TOGGLE_MUTE":\n await conversation.toggleMute();\n break;\n case "KEEP_SESSION":\n await conversation.keepSession();\n break;\n default:\n break;\n }\n } catch 
(error) {\n send("ERROR", {\n message: error?.message || "Operation failed",\n type: "notification",\n endConversation: false,\n });\n }\n };\n\n const handleIncoming = (event) => {\n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== "INIT") {\n queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (error) {\n console.error("Failed to handle incoming message:", error);\n }\n };\n\n window.addEventListener("message", handleIncoming);\n document.addEventListener("message", handleIncoming);\n <\/script>\n </body>\n</html>`,[r.webClientUrl]),u=o((n,e)=>{var a;const t={type:n,payload:e};console.log("Posting message to WebView:",n,e),null==(a=l.current)||a.postMessage(JSON.stringify(t))},[]),g=o(()=>{d.current?console.log("Already initialized, skipping..."):(console.log("WebView loaded, initializing bridge..."),console.log("Initializing conversation bridge with options:",n),d.current=!0,p(!0),setTimeout(()=>{u("INIT",i({},n))},100))},[n,u]),E=o(n=>{let e=null;try{e=JSON.parse(n.nativeEvent.data)}catch(n){return}if(e)switch(e.type){case"STATUS_CHANGE":null==s.onStatusChange||s.onStatusChange(e.payload);break;case"CONNECT":null==s.onConnect||s.onConnect(e.payload);break;case"DISCONNECT":null==s.onDisconnect||s.onDisconnect(e.payload);break;case"MESSAGE":null==s.onMessage||s.onMessage(e.payload);break;case"SUGGESTIONS":null==s.onSuggestions||s.onSuggestions(e.payload);break;case"SPEAKING_START":null==s.onSpeakingStart||s.onSpeakingStart();break;case"SPEAKING_END":null==s.onSpeakingEnd||s.onSpeakingEnd();break;case"TIMEOUT_WARNING":null==s.onTimeoutWarning||s.onTimeoutWarning();break;case"TIMEOUT":null==s.onTimeout||s.onTimeout();break;case"KEEP_SESSION":null==s.onKeepSession||s.onKeepSession(e.payload);break;case"MUTE_STATUS":null==s.onMuteStatusChange||s.onMuteStatusChange(e.payload);break;case"MIC_STATUS":null==s.onMicrophoneStatusChange||s.onMicrophoneStatusChange(e.payload);break;case"MIC_TRANSCRIPT":null==s.onMicrophoneSpeechRecognitionResult||s.onMicrophoneSpeechRecognitionResult(e.payload);break;case"MIC_ERROR":null==s.onMicrophoneError||s.onMicrophoneError(e.payload);break;case"ERROR":null==s.onError||s.onError(e.payload)}},[s]);return{webViewRef:l,webViewProps:{source:{html:S},onMessage:E,onLoadEnd:g,javaScriptEnabled:!0,mediaPlaybackRequiresUserAction:!1,allowsInlineMediaPlayback:!0,allowsFileAccess:!0,domStorageEnabled:!0,mediaCapturePermissionGrantType:"grant"},initialized:c,startSession:()=>u("START_SESSION"),endSession:()=>u("END_SESSION"),sendMessage:n=>u("SEND_MESSAGE",{text:n}),toggleMicrophone:()=>u("TOGGLE_MIC"),toggleMute:()=>u("TOGGLE_MUTE"),keepSession:()=>u("KEEP_SESSION")}}(n,s,r)}function d(e){let{options:a,style:t,webviewProps:o,webClientUrl:d}=e;const c=l(a,function(n,e){if(null==n)return{};var a={};for(var t in n)if({}.hasOwnProperty.call(n,t)){if(-1!==e.indexOf(t))continue;a[t]=n[t]}return a}(e,r),{webClientUrl:d});/*#__PURE__*/return n.createElement(s,i({ref:c.webViewRef},c.webViewProps,o,{style:t}))}export{d as UnithConversationView,l as useConversation};
  //# sourceMappingURL=lib.modern.mjs.map
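
The hunk above covers the modern ESM bundle (lib.modern.mjs, per its sourceMappingURL). Relative to 0.0.6, the 0.0.7 bundle guards initialization with a ref rather than state alone, logs every message posted into the WebView, defers the INIT message by 100 ms after onLoadEnd, and trims the bridge's "Applying message from React Native:" log to "Applying message:". For orientation, here is a minimal consumer sketch of the useConversation hook this bundle exports; the option values are placeholders, not real credentials:

    import React from "react";
    import { WebView } from "react-native-webview";
    import { useConversation } from "@unith-ai/react-native";

    // Minimal sketch; orgId/headId/apiKey are placeholder values.
    export function ConversationScreen() {
      const convo = useConversation(
        { orgId: "org_...", headId: "head_...", apiKey: "..." },
        {
          onConnect: ({ userId }) => console.log("connected:", userId),
          onMessage: (msg) => console.log("message:", msg),
        }
      );

      // webViewProps carries the inline HTML source, the onMessage/onLoadEnd
      // handlers, and the media playback flags set by the bridge.
      return (
        <WebView
          ref={convo.webViewRef}
          {...convo.webViewProps}
          style={{ flex: 1 }}
        />
      );
    }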
@@ -1 +1 @@
- {"version":3,"file":"lib.modern.mjs","sources":["../src/index.tsx"],"sourcesContent":["import React, { useCallback, useMemo, useRef, useState } from \"react\";\nimport { ViewStyle } from \"react-native\";\nimport { WebView, WebViewMessageEvent } from \"react-native-webview\";\n\nexport type ConversationOptions = {\n orgId: string;\n headId: string;\n apiKey: string;\n environment?: string;\n mode?: string;\n language?: string;\n username?: string;\n allowWakeLock?: boolean;\n fadeTransitionsType?: string;\n microphoneProvider?: \"azure\" | \"eleven_labs\" | \"custom\";\n};\n\nexport type ConversationEvents = {\n onStatusChange?: (prop: { status: string }) => void;\n onConnect?: (prop: { userId: string; headInfo: any; microphoneAccess: boolean }) => void;\n onDisconnect?: (prop: any) => void;\n onMessage?: (prop: any) => void;\n onSuggestions?: (prop: { suggestions: string[] }) => void;\n onTimeoutWarning?: () => void;\n onTimeout?: () => void;\n onMuteStatusChange?: (prop: { isMuted: boolean }) => void;\n onSpeakingStart?: () => void;\n onSpeakingEnd?: () => void;\n onKeepSession?: (prop: { granted: boolean }) => void;\n onError?: (prop: { message: string; endConversation: boolean; type: string }) => void;\n onMicrophoneError?: (prop: { message: string }) => void;\n onMicrophoneStatusChange?: (prop: { status: \"ON\" | \"OFF\" | \"PROCESSING\" }) => void;\n onMicrophoneSpeechRecognitionResult?: (prop: { transcript: string }) => void;\n};\n\nexport type BridgeOptions = {\n webClientUrl?: string;\n};\n\nexport type UseConversationResult = {\n webViewRef: React.RefObject<any>;\n webViewProps: Record<string, any>;\n initialized: boolean;\n startSession: () => void;\n endSession: () => void;\n sendMessage: (text: string) => void;\n toggleMicrophone: () => void;\n toggleMute: () => void;\n keepSession: () => void;\n};\n\ntype BridgeMessage = {\n type: string;\n payload?: any;\n};\n\nconst DEFAULT_WEB_CLIENT_URL =\n \"https://unpkg.com/@unith-ai/core-client/dist/lib.module.js\";\n\nfunction buildHtml(webClientUrl: string) {\n return `<!doctype html>\n<html>\n <head>\n <meta charset=\"utf-8\" />\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id=\"root\"></div>\n <script type=\"module\">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => {\n console.log(\"Applying message from React Native:\", msg);\n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === \"INIT\") {\n try {\n const { Conversation } = await import(\"${webClientUrl}\");\n\n if (!(\"VideoDecoder\" in window)) {\n send(\"ERROR\", {\n message: \"WebCodecs VideoDecoder is not supported on this device.\",\n type: \"modal\",\n endConversation: true,\n });\n return;\n }\n\n const element = document.getElementById(\"root\");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send(\"STATUS_CHANGE\", data),\n onConnect: (data) => send(\"CONNECT\", data),\n onDisconnect: (data) => send(\"DISCONNECT\", data),\n onMessage: (data) => send(\"MESSAGE\", data),\n onSuggestions: (data) => send(\"SUGGESTIONS\", data),\n 
onSpeakingStart: () => send(\"SPEAKING_START\"),\n onSpeakingEnd: () => send(\"SPEAKING_END\"),\n onTimeoutWarning: () => send(\"TIMEOUT_WARNING\"),\n onTimeout: () => send(\"TIMEOUT\"),\n onKeepSession: (data) => send(\"KEEP_SESSION\", data),\n onMuteStatusChange: (data) => send(\"MUTE_STATUS\", data),\n onError: (data) => send(\"ERROR\", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send(\"MIC_ERROR\", data),\n onMicrophoneStatusChange: (data) => send(\"MIC_STATUS\", data),\n onMicrophoneSpeechRecognitionResult: (data) => send(\"MIC_TRANSCRIPT\", data),\n },\n });\n\n ready = true;\n send(\"READY\");\n\n while (queue.length > 0) {\n const next = queue.shift();\n try {\n await applyMessage(next);\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to process queued message\",\n type: \"notification\",\n endConversation: false,\n });\n }\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to initialize conversation\",\n type: \"modal\",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n try {\n switch (type) {\n case \"START_SESSION\":\n await conversation.startSession();\n break;\n case \"END_SESSION\":\n await conversation.endSession();\n break;\n case \"SEND_MESSAGE\":\n await conversation.sendMessage(payload?.text || \"\");\n break;\n case \"TOGGLE_MIC\":\n await conversation.toggleMicrophone();\n break;\n case \"TOGGLE_MUTE\":\n await conversation.toggleMute();\n break;\n case \"KEEP_SESSION\":\n await conversation.keepSession();\n break;\n default:\n break;\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Operation failed\",\n type: \"notification\",\n endConversation: false,\n });\n }\n };\n\n const handleIncoming = (event) => {\n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== \"INIT\") {\n queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (error) {\n console.error(\"Failed to handle incoming message:\", error);\n }\n };\n\n window.addEventListener(\"message\", handleIncoming);\n document.addEventListener(\"message\", handleIncoming);\n </script>\n </body>\n</html>`;\n}\n\nfunction useBridge(\n options: ConversationOptions,\n events: ConversationEvents = {},\n bridge: BridgeOptions = {}\n): UseConversationResult {\n const webViewRef = useRef<any>(null);\n const [initialized, setInitialized] = useState(false);\n\n const html = useMemo(\n () => buildHtml(bridge.webClientUrl || DEFAULT_WEB_CLIENT_URL),\n [bridge.webClientUrl]\n );\n\n const post = useCallback((type: string, payload?: any) => {\n const message: BridgeMessage = { type, payload };\n webViewRef.current?.postMessage(JSON.stringify(message));\n }, []);\n\n const init = useCallback(() => {\n if (initialized) return;\n console.log(\"Initializing conversation bridge with options:\", options);\n post(\"INIT\", {\n ...options,\n });\n setInitialized(true);\n }, [initialized, options, post]);\n\n const handleLoadEnd = useCallback(() => {\n console.log(\"WebView loaded, initializing bridge...\");\n init();\n }, [init]);\n\n const onMessage = useCallback(\n (event: WebViewMessageEvent) => {\n let data: BridgeMessage | null = null;\n try {\n data = JSON.parse(event.nativeEvent.data);\n } catch (_) {\n return;\n }\n if (!data) return;\n\n switch (data.type) {\n case \"STATUS_CHANGE\":\n events.onStatusChange?.(data.payload);\n break;\n case \"CONNECT\":\n events.onConnect?.(data.payload);\n break;\n case \"DISCONNECT\":\n events.onDisconnect?.(data.payload);\n 
break;\n case \"MESSAGE\":\n events.onMessage?.(data.payload);\n break;\n case \"SUGGESTIONS\":\n events.onSuggestions?.(data.payload);\n break;\n case \"SPEAKING_START\":\n events.onSpeakingStart?.();\n break;\n case \"SPEAKING_END\":\n events.onSpeakingEnd?.();\n break;\n case \"TIMEOUT_WARNING\":\n events.onTimeoutWarning?.();\n break;\n case \"TIMEOUT\":\n events.onTimeout?.();\n break;\n case \"KEEP_SESSION\":\n events.onKeepSession?.(data.payload);\n break;\n case \"MUTE_STATUS\":\n events.onMuteStatusChange?.(data.payload);\n break;\n case \"MIC_STATUS\":\n events.onMicrophoneStatusChange?.(data.payload);\n break;\n case \"MIC_TRANSCRIPT\":\n events.onMicrophoneSpeechRecognitionResult?.(data.payload);\n break;\n case \"MIC_ERROR\":\n events.onMicrophoneError?.(data.payload);\n break;\n case \"ERROR\":\n events.onError?.(data.payload);\n break;\n default:\n break;\n }\n },\n [events]\n );\n\n return {\n webViewRef,\n webViewProps: {\n source: { html },\n onMessage,\n onLoadEnd: handleLoadEnd,\n javaScriptEnabled: true,\n mediaPlaybackRequiresUserAction: false,\n allowsInlineMediaPlayback: true,\n allowsFileAccess: true,\n domStorageEnabled: true,\n mediaCapturePermissionGrantType: 'grant',\n },\n initialized,\n startSession: () => post(\"START_SESSION\"),\n endSession: () => post(\"END_SESSION\"),\n sendMessage: (text: string) => post(\"SEND_MESSAGE\", { text }),\n toggleMicrophone: () => post(\"TOGGLE_MIC\"),\n toggleMute: () => post(\"TOGGLE_MUTE\"),\n keepSession: () => post(\"KEEP_SESSION\"),\n };\n}\n\nexport function useConversation(\n options: ConversationOptions,\n events?: ConversationEvents,\n bridge?: BridgeOptions\n): UseConversationResult {\n return useBridge(options, events, bridge);\n}\n\nexport type UnithConversationViewProps = ConversationEvents & {\n options: ConversationOptions;\n style?: ViewStyle;\n webviewProps?: Record<string, any>;\n webClientUrl?: string;\n};\n\nexport function UnithConversationView({\n options,\n style,\n webviewProps,\n webClientUrl,\n ...events\n}: UnithConversationViewProps) {\n const convo = useConversation(options, events, { webClientUrl });\n\n return (\n <WebView\n ref={convo.webViewRef}\n {...convo.webViewProps}\n {...webviewProps}\n style={style}\n />\n 
);\n}"],"names":["_excluded","useConversation","options","events","bridge","webViewRef","useRef","initialized","setInitialized","useState","html","useMemo","webClientUrl","post","useCallback","type","payload","_webViewRef$current","current","postMessage","JSON","stringify","init","console","log","_extends","handleLoadEnd","onMessage","event","data","parse","nativeEvent","_","onStatusChange","onConnect","onDisconnect","onSuggestions","onSpeakingStart","onSpeakingEnd","onTimeoutWarning","onTimeout","onKeepSession","onMuteStatusChange","onMicrophoneStatusChange","onMicrophoneSpeechRecognitionResult","onMicrophoneError","onError","webViewProps","source","onLoadEnd","javaScriptEnabled","mediaPlaybackRequiresUserAction","allowsInlineMediaPlayback","allowsFileAccess","domStorageEnabled","mediaCapturePermissionGrantType","startSession","endSession","sendMessage","text","toggleMicrophone","toggleMute","keepSession","useBridge","UnithConversationView","_ref","style","webviewProps","convo","_objectWithoutPropertiesLoose","React","WebView","ref"],"mappings":"qVAAA,MAAAA,EAAA,CAAA,UAAA,QAAA,eAAA,yBAiUgBC,EACdC,EACAC,EACAC,GAEA,OA3HF,SACEF,EACAC,EAA6B,CAAA,EAC7BC,EAAwB,IAExB,MAAMC,EAAaC,EAAY,OACxBC,EAAaC,GAAkBC,GAAS,GAEzCC,EAAOC,EACX,IAxJK,u9BAwJWP,EAAOQ,cA3JzB,q0HA4JE,CAACR,EAAOQ,eAGJC,EAAOC,EAAY,CAACC,EAAcC,SAAiBC,EAEvDA,OAAAA,EAAAZ,EAAWa,UAAXD,EAAoBE,YAAYC,KAAKC,UADN,CAAEN,OAAMC,cAEtC,IAEGM,EAAOR,EAAY,KACnBP,IACJgB,QAAQC,IAAI,iDAAkDtB,GAC9DW,EAAK,OAAMY,EAAA,GACNvB,IAELM,GAAe,KACd,CAACD,EAAaL,EAASW,IAEpBa,EAAgBZ,EAAY,KAChCS,QAAQC,IAAI,0CACZF,KACC,CAACA,IAEEK,EAAYb,EACfc,IACC,IAAIC,EAA6B,KACjC,IACEA,EAAOT,KAAKU,MAAMF,EAAMG,YAAYF,KACtC,CAAE,MAAOG,GACP,MACF,CACA,GAAKH,EAEL,OAAQA,EAAKd,MACX,IAAK,sBACHZ,EAAO8B,gBAAP9B,EAAO8B,eAAiBJ,EAAKb,SAC7B,MACF,IAAK,gBACHb,EAAO+B,WAAP/B,EAAO+B,UAAYL,EAAKb,SACxB,MACF,IAAK,aACgB,MAAnBb,EAAOgC,cAAPhC,EAAOgC,aAAeN,EAAKb,SAC3B,MACF,IAAK,UACHb,MAAAA,EAAOwB,WAAPxB,EAAOwB,UAAYE,EAAKb,SACxB,MACF,IAAK,cACiB,MAApBb,EAAOiC,eAAPjC,EAAOiC,cAAgBP,EAAKb,SAC5B,MACF,IAAK,uBACHb,EAAOkC,iBAAPlC,EAAOkC,kBACP,MACF,IAAK,qBACHlC,EAAOmC,eAAPnC,EAAOmC,gBACP,MACF,IAAK,kBACHnC,MAAAA,EAAOoC,kBAAPpC,EAAOoC,mBACP,MACF,IAAK,UACa,MAAhBpC,EAAOqC,WAAPrC,EAAOqC,YACP,MACF,IAAK,qBACHrC,EAAOsC,eAAPtC,EAAOsC,cAAgBZ,EAAKb,SAC5B,MACF,IAAK,cACsB,MAAzBb,EAAOuC,oBAAPvC,EAAOuC,mBAAqBb,EAAKb,SACjC,MACF,IAAK,aAC4B,MAA/Bb,EAAOwC,0BAAPxC,EAAOwC,yBAA2Bd,EAAKb,SACvC,MACF,IAAK,uBACHb,EAAOyC,qCAAPzC,EAAOyC,oCAAsCf,EAAKb,SAClD,MACF,IAAK,YACHb,MAAAA,EAAO0C,mBAAP1C,EAAO0C,kBAAoBhB,EAAKb,SAChC,MACF,IAAK,cACHb,EAAO2C,SAAP3C,EAAO2C,QAAUjB,EAAKb,WAM5B,CAACb,IAGH,MAAO,CACLE,aACA0C,aAAc,CACZC,OAAQ,CAAEtC,QACViB,YACAsB,UAAWvB,EACXwB,mBAAmB,EACnBC,iCAAiC,EACjCC,2BAA2B,EAC3BC,kBAAkB,EAClBC,mBAAmB,EACnBC,gCAAiC,SAEnChD,cACAiD,aAAcA,IAAM3C,EAAK,iBACzB4C,WAAYA,IAAM5C,EAAK,eACvB6C,YAAcC,GAAiB9C,EAAK,eAAgB,CAAE8C,SACtDC,iBAAkBA,IAAM/C,EAAK,cAC7BgD,WAAYA,IAAMhD,EAAK,eACvBiD,YAAaA,IAAMjD,EAAK,gBAE5B,CAOSkD,CAAU7D,EAASC,EAAQC,EACpC,CASgB,SAAA4D,EAAqBC,GAMR,IANS/D,QACpCA,EAAOgE,MACPA,EAAKC,aACLA,EAAYvD,aACZA,GAE2BqD,EAC3B,MAAMG,EAAQnE,EAAgBC,6IAFrBmE,CAAAJ,EAAAjE,GAEsC,CAAEY,8BAEjD,OACE0D,gBAACC,EAAO9C,EACN+C,CAAAA,IAAKJ,EAAM/D,YACP+D,EAAMrB,aACNoB,EAAY,CAChBD,MAAOA,IAGb"}
+ {"version":3,"file":"lib.modern.mjs","sources":["../src/index.tsx"],"sourcesContent":["import React, { useCallback, useMemo, useRef, useState } from \"react\";\nimport { ViewStyle } from \"react-native\";\nimport { WebView, WebViewMessageEvent } from \"react-native-webview\";\n\nexport type ConversationOptions = {\n orgId: string;\n headId: string;\n apiKey: string;\n environment?: string;\n mode?: string;\n language?: string;\n username?: string;\n allowWakeLock?: boolean;\n fadeTransitionsType?: string;\n microphoneProvider?: \"azure\" | \"eleven_labs\" | \"custom\";\n};\n\nexport type ConversationEvents = {\n onStatusChange?: (prop: { status: string }) => void;\n onConnect?: (prop: { userId: string; headInfo: any; microphoneAccess: boolean }) => void;\n onDisconnect?: (prop: any) => void;\n onMessage?: (prop: any) => void;\n onSuggestions?: (prop: { suggestions: string[] }) => void;\n onTimeoutWarning?: () => void;\n onTimeout?: () => void;\n onMuteStatusChange?: (prop: { isMuted: boolean }) => void;\n onSpeakingStart?: () => void;\n onSpeakingEnd?: () => void;\n onKeepSession?: (prop: { granted: boolean }) => void;\n onError?: (prop: { message: string; endConversation: boolean; type: string }) => void;\n onMicrophoneError?: (prop: { message: string }) => void;\n onMicrophoneStatusChange?: (prop: { status: \"ON\" | \"OFF\" | \"PROCESSING\" }) => void;\n onMicrophoneSpeechRecognitionResult?: (prop: { transcript: string }) => void;\n};\n\nexport type BridgeOptions = {\n webClientUrl?: string;\n};\n\nexport type UseConversationResult = {\n webViewRef: React.RefObject<any>;\n webViewProps: Record<string, any>;\n initialized: boolean;\n startSession: () => void;\n endSession: () => void;\n sendMessage: (text: string) => void;\n toggleMicrophone: () => void;\n toggleMute: () => void;\n keepSession: () => void;\n};\n\ntype BridgeMessage = {\n type: string;\n payload?: any;\n};\n\nconst DEFAULT_WEB_CLIENT_URL =\n \"https://unpkg.com/@unith-ai/core-client/dist/lib.module.js\";\n\nfunction buildHtml(webClientUrl: string) {\n return `<!doctype html>\n<html>\n <head>\n <meta charset=\"utf-8\" />\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id=\"root\"></div>\n <script type=\"module\">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => {\n console.log(\"Applying message:\", msg);\n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === \"INIT\") {\n try {\n const { Conversation } = await import(\"${webClientUrl}\");\n\n if (!(\"VideoDecoder\" in window)) {\n send(\"ERROR\", {\n message: \"WebCodecs VideoDecoder is not supported on this device.\",\n type: \"modal\",\n endConversation: true,\n });\n return;\n }\n\n const element = document.getElementById(\"root\");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send(\"STATUS_CHANGE\", data),\n onConnect: (data) => send(\"CONNECT\", data),\n onDisconnect: (data) => send(\"DISCONNECT\", data),\n onMessage: (data) => send(\"MESSAGE\", data),\n onSuggestions: (data) => send(\"SUGGESTIONS\", data),\n onSpeakingStart: () => 
send(\"SPEAKING_START\"),\n onSpeakingEnd: () => send(\"SPEAKING_END\"),\n onTimeoutWarning: () => send(\"TIMEOUT_WARNING\"),\n onTimeout: () => send(\"TIMEOUT\"),\n onKeepSession: (data) => send(\"KEEP_SESSION\", data),\n onMuteStatusChange: (data) => send(\"MUTE_STATUS\", data),\n onError: (data) => send(\"ERROR\", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send(\"MIC_ERROR\", data),\n onMicrophoneStatusChange: (data) => send(\"MIC_STATUS\", data),\n onMicrophoneSpeechRecognitionResult: (data) => send(\"MIC_TRANSCRIPT\", data),\n },\n });\n\n ready = true;\n send(\"READY\");\n\n while (queue.length > 0) {\n const next = queue.shift();\n try {\n await applyMessage(next);\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to process queued message\",\n type: \"notification\",\n endConversation: false,\n });\n }\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to initialize conversation\",\n type: \"modal\",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n try {\n switch (type) {\n case \"START_SESSION\":\n await conversation.startSession();\n break;\n case \"END_SESSION\":\n await conversation.endSession();\n break;\n case \"SEND_MESSAGE\":\n await conversation.sendMessage(payload?.text || \"\");\n break;\n case \"TOGGLE_MIC\":\n await conversation.toggleMicrophone();\n break;\n case \"TOGGLE_MUTE\":\n await conversation.toggleMute();\n break;\n case \"KEEP_SESSION\":\n await conversation.keepSession();\n break;\n default:\n break;\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Operation failed\",\n type: \"notification\",\n endConversation: false,\n });\n }\n };\n\n const handleIncoming = (event) => {\n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== \"INIT\") {\n queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (error) {\n console.error(\"Failed to handle incoming message:\", error);\n }\n };\n\n window.addEventListener(\"message\", handleIncoming);\n document.addEventListener(\"message\", handleIncoming);\n </script>\n </body>\n</html>`;\n}\n\nfunction useBridge(\n options: ConversationOptions,\n events: ConversationEvents = {},\n bridge: BridgeOptions = {}\n): UseConversationResult {\n const webViewRef = useRef<any>(null);\n const initializedRef = useRef(false);\n const [initialized, setInitialized] = useState(false);\n\n const html = useMemo(\n () => buildHtml(bridge.webClientUrl || DEFAULT_WEB_CLIENT_URL),\n [bridge.webClientUrl]\n );\n\n const post = useCallback((type: string, payload?: any) => {\n const message: BridgeMessage = { type, payload };\n console.log(\"Posting message to WebView:\", type, payload);\n webViewRef.current?.postMessage(JSON.stringify(message));\n }, []);\n\n const handleLoadEnd = useCallback(() => {\n if (initializedRef.current) {\n console.log(\"Already initialized, skipping...\");\n return;\n }\n\n console.log(\"WebView loaded, initializing bridge...\");\n console.log(\"Initializing conversation bridge with options:\", options);\n\n initializedRef.current = true;\n setInitialized(true);\n\n // Add a small delay to ensure WebView is ready to receive messages\n setTimeout(() => {\n post(\"INIT\", { ...options });\n }, 100);\n }, [options, post]);\n\n const onMessage = useCallback(\n (event: WebViewMessageEvent) => {\n let data: BridgeMessage | null = null;\n try {\n data = JSON.parse(event.nativeEvent.data);\n } catch (_) {\n return;\n }\n if (!data) return;\n\n switch 
(data.type) {\n case \"STATUS_CHANGE\":\n events.onStatusChange?.(data.payload);\n break;\n case \"CONNECT\":\n events.onConnect?.(data.payload);\n break;\n case \"DISCONNECT\":\n events.onDisconnect?.(data.payload);\n break;\n case \"MESSAGE\":\n events.onMessage?.(data.payload);\n break;\n case \"SUGGESTIONS\":\n events.onSuggestions?.(data.payload);\n break;\n case \"SPEAKING_START\":\n events.onSpeakingStart?.();\n break;\n case \"SPEAKING_END\":\n events.onSpeakingEnd?.();\n break;\n case \"TIMEOUT_WARNING\":\n events.onTimeoutWarning?.();\n break;\n case \"TIMEOUT\":\n events.onTimeout?.();\n break;\n case \"KEEP_SESSION\":\n events.onKeepSession?.(data.payload);\n break;\n case \"MUTE_STATUS\":\n events.onMuteStatusChange?.(data.payload);\n break;\n case \"MIC_STATUS\":\n events.onMicrophoneStatusChange?.(data.payload);\n break;\n case \"MIC_TRANSCRIPT\":\n events.onMicrophoneSpeechRecognitionResult?.(data.payload);\n break;\n case \"MIC_ERROR\":\n events.onMicrophoneError?.(data.payload);\n break;\n case \"ERROR\":\n events.onError?.(data.payload);\n break;\n default:\n break;\n }\n },\n [events]\n );\n\n return {\n webViewRef,\n webViewProps: {\n source: { html },\n onMessage,\n onLoadEnd: handleLoadEnd,\n javaScriptEnabled: true,\n mediaPlaybackRequiresUserAction: false,\n allowsInlineMediaPlayback: true,\n allowsFileAccess: true,\n domStorageEnabled: true,\n mediaCapturePermissionGrantType: 'grant',\n },\n initialized,\n startSession: () => post(\"START_SESSION\"),\n endSession: () => post(\"END_SESSION\"),\n sendMessage: (text: string) => post(\"SEND_MESSAGE\", { text }),\n toggleMicrophone: () => post(\"TOGGLE_MIC\"),\n toggleMute: () => post(\"TOGGLE_MUTE\"),\n keepSession: () => post(\"KEEP_SESSION\"),\n };\n}\n\nexport function useConversation(\n options: ConversationOptions,\n events?: ConversationEvents,\n bridge?: BridgeOptions\n): UseConversationResult {\n return useBridge(options, events, bridge);\n}\n\nexport type UnithConversationViewProps = ConversationEvents & {\n options: ConversationOptions;\n style?: ViewStyle;\n webviewProps?: Record<string, any>;\n webClientUrl?: string;\n};\n\nexport function UnithConversationView({\n options,\n style,\n webviewProps,\n webClientUrl,\n ...events\n}: UnithConversationViewProps) {\n const convo = useConversation(options, events, { webClientUrl });\n\n return (\n <WebView\n ref={convo.webViewRef}\n {...convo.webViewProps}\n {...webviewProps}\n style={style}\n />\n 
);\n}"],"names":["_excluded","useConversation","options","events","bridge","webViewRef","useRef","initializedRef","initialized","setInitialized","useState","html","useMemo","webClientUrl","post","useCallback","type","payload","_webViewRef$current","message","console","log","current","postMessage","JSON","stringify","handleLoadEnd","setTimeout","_extends","onMessage","event","data","parse","nativeEvent","_","onStatusChange","onConnect","onDisconnect","onSuggestions","onSpeakingStart","onSpeakingEnd","onTimeoutWarning","onTimeout","onKeepSession","onMuteStatusChange","onMicrophoneStatusChange","onMicrophoneSpeechRecognitionResult","onMicrophoneError","onError","webViewProps","source","onLoadEnd","javaScriptEnabled","mediaPlaybackRequiresUserAction","allowsInlineMediaPlayback","allowsFileAccess","domStorageEnabled","mediaCapturePermissionGrantType","startSession","endSession","sendMessage","text","toggleMicrophone","toggleMute","keepSession","useBridge","UnithConversationView","_ref","style","webviewProps","convo","_objectWithoutPropertiesLoose","React","WebView","ref"],"mappings":"qVAAA,MAAAA,EAAA,CAAA,UAAA,QAAA,eAAA,yBAuUgBC,EACdC,EACAC,EACAC,GAEA,OAjIF,SACEF,EACAC,EAA6B,CAAE,EAC/BC,EAAwB,CAAA,GAExB,MAAMC,EAAaC,EAAY,MACzBC,EAAiBD,GAAO,IACvBE,EAAaC,GAAkBC,GAAS,GAEzCC,EAAOC,EACX,IAzJK,q8BAyJWR,EAAOS,cA5JzB,q0HA6JE,CAACT,EAAOS,eAGJC,EAAOC,EAAY,CAACC,EAAcC,KAAiB,IAAAC,EACvD,MAAMC,EAAyB,CAAEH,OAAMC,WACvCG,QAAQC,IAAI,8BAA+BL,EAAMC,GAC/B,OAAlBC,EAAAb,EAAWiB,UAAXJ,EAAoBK,YAAYC,KAAKC,UAAUN,KAC9C,IAEGO,EAAgBX,EAAY,KAC5BR,EAAee,QACjBF,QAAQC,IAAI,qCAIdD,QAAQC,IAAI,0CACZD,QAAQC,IAAI,iDAAkDnB,GAE9DK,EAAee,SAAU,EACzBb,GAAe,GAGfkB,WAAW,KACTb,EAAK,OAAMc,EAAO1B,CAAAA,EAAAA,KACjB,OACF,CAACA,EAASY,IAEPe,EAAYd,EACfe,IACC,IAAIC,EAA6B,KACjC,IACEA,EAAOP,KAAKQ,MAAMF,EAAMG,YAAYF,KACtC,CAAE,MAAOG,GACP,MACF,CACA,GAAKH,EAEL,OAAQA,EAAKf,MACX,IAAK,gBACHb,MAAAA,EAAOgC,gBAAPhC,EAAOgC,eAAiBJ,EAAKd,SAC7B,MACF,IAAK,UACa,MAAhBd,EAAOiC,WAAPjC,EAAOiC,UAAYL,EAAKd,SACxB,MACF,IAAK,aACgB,MAAnBd,EAAOkC,cAAPlC,EAAOkC,aAAeN,EAAKd,SAC3B,MACF,IAAK,gBACHd,EAAO0B,WAAP1B,EAAO0B,UAAYE,EAAKd,SACxB,MACF,IAAK,oBACHd,EAAOmC,eAAPnC,EAAOmC,cAAgBP,EAAKd,SAC5B,MACF,IAAK,iBACHd,MAAAA,EAAOoC,iBAAPpC,EAAOoC,kBACP,MACF,IAAK,eACHpC,MAAAA,EAAOqC,eAAPrC,EAAOqC,gBACP,MACF,IAAK,kBACHrC,MAAAA,EAAOsC,kBAAPtC,EAAOsC,mBACP,MACF,IAAK,UACa,MAAhBtC,EAAOuC,WAAPvC,EAAOuC,YACP,MACF,IAAK,eACiB,MAApBvC,EAAOwC,eAAPxC,EAAOwC,cAAgBZ,EAAKd,SAC5B,MACF,IAAK,cACsB,MAAzBd,EAAOyC,oBAAPzC,EAAOyC,mBAAqBb,EAAKd,SACjC,MACF,IAAK,mBACHd,EAAO0C,0BAAP1C,EAAO0C,yBAA2Bd,EAAKd,SACvC,MACF,IAAK,iBACHd,MAAAA,EAAO2C,qCAAP3C,EAAO2C,oCAAsCf,EAAKd,SAClD,MACF,IAAK,YACHd,MAAAA,EAAO4C,mBAAP5C,EAAO4C,kBAAoBhB,EAAKd,SAChC,MACF,IAAK,QACW,MAAdd,EAAO6C,SAAP7C,EAAO6C,QAAUjB,EAAKd,WAM5B,CAACd,IAGH,MAAO,CACLE,aACA4C,aAAc,CACZC,OAAQ,CAAEvC,QACVkB,YACAsB,UAAWzB,EACX0B,mBAAmB,EACnBC,iCAAiC,EACjCC,2BAA2B,EAC3BC,kBAAkB,EAClBC,mBAAmB,EACnBC,gCAAiC,SAEnCjD,cACAkD,aAAcA,IAAM5C,EAAK,iBACzB6C,WAAYA,IAAM7C,EAAK,eACvB8C,YAAcC,GAAiB/C,EAAK,eAAgB,CAAE+C,SACtDC,iBAAkBA,IAAMhD,EAAK,cAC7BiD,WAAYA,IAAMjD,EAAK,eACvBkD,YAAaA,IAAMlD,EAAK,gBAE5B,CAOSmD,CAAU/D,EAASC,EAAQC,EACpC,UASgB8D,EAAqBC,GAAC,IAAAjE,QACpCA,EAAOkE,MACPA,EAAKC,aACLA,EAAYxD,aACZA,GAE2BsD,EAC3B,MAAMG,EAAQrE,EAAgBC,6IAFrBqE,CAAAJ,EAAAnE,GAEsC,CAAEa,8BAEjD,OACE2D,gBAACC,EAAO7C,EACN8C,CAAAA,IAAKJ,EAAMjE,YACPiE,EAAMrB,aACNoB,EAAY,CAChBD,MAAOA,IAGb"}
@@ -1,2 +1,2 @@
- import n,{useRef as e,useState as a,useMemo as t,useCallback as o}from"react";import{WebView as i}from"react-native-webview";function r(){return r=Object.assign?Object.assign.bind():function(n){for(var e=1;e<arguments.length;e++){var a=arguments[e];for(var t in a)({}).hasOwnProperty.call(a,t)&&(n[t]=a[t])}return n},r.apply(null,arguments)}var s=["options","style","webviewProps","webClientUrl"];function l(n,i,s){return function(n,i,s){void 0===i&&(i={}),void 0===s&&(s={});var l=e(null),d=a(!1),c=d[0],u=d[1],S=t(function(){return'<!doctype html>\n<html>\n <head>\n <meta charset="utf-8" />\n <meta name="viewport" content="width=device-width, initial-scale=1" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id="root"></div>\n <script type="module">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => {\n console.log("Applying message from React Native:", msg);\n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === "INIT") {\n try {\n const { Conversation } = await import("'+(s.webClientUrl||"https://unpkg.com/@unith-ai/core-client/dist/lib.module.js")+'");\n\n if (!("VideoDecoder" in window)) {\n send("ERROR", {\n message: "WebCodecs VideoDecoder is not supported on this device.",\n type: "modal",\n endConversation: true,\n });\n return;\n }\n\n const element = document.getElementById("root");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send("STATUS_CHANGE", data),\n onConnect: (data) => send("CONNECT", data),\n onDisconnect: (data) => send("DISCONNECT", data),\n onMessage: (data) => send("MESSAGE", data),\n onSuggestions: (data) => send("SUGGESTIONS", data),\n onSpeakingStart: () => send("SPEAKING_START"),\n onSpeakingEnd: () => send("SPEAKING_END"),\n onTimeoutWarning: () => send("TIMEOUT_WARNING"),\n onTimeout: () => send("TIMEOUT"),\n onKeepSession: (data) => send("KEEP_SESSION", data),\n onMuteStatusChange: (data) => send("MUTE_STATUS", data),\n onError: (data) => send("ERROR", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send("MIC_ERROR", data),\n onMicrophoneStatusChange: (data) => send("MIC_STATUS", data),\n onMicrophoneSpeechRecognitionResult: (data) => send("MIC_TRANSCRIPT", data),\n },\n });\n\n ready = true;\n send("READY");\n\n while (queue.length > 0) {\n const next = queue.shift();\n try {\n await applyMessage(next);\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Failed to process queued message",\n type: "notification",\n endConversation: false,\n });\n }\n }\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Failed to initialize conversation",\n type: "modal",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n try {\n switch (type) {\n case "START_SESSION":\n await conversation.startSession();\n break;\n case "END_SESSION":\n await conversation.endSession();\n break;\n case "SEND_MESSAGE":\n await conversation.sendMessage(payload?.text || "");\n break;\n case "TOGGLE_MIC":\n await conversation.toggleMicrophone();\n break;\n case "TOGGLE_MUTE":\n await conversation.toggleMute();\n break;\n case "KEEP_SESSION":\n await 
conversation.keepSession();\n break;\n default:\n break;\n }\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Operation failed",\n type: "notification",\n endConversation: false,\n });\n }\n };\n\n const handleIncoming = (event) => {\n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== "INIT") {\n queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (error) {\n console.error("Failed to handle incoming message:", error);\n }\n };\n\n window.addEventListener("message", handleIncoming);\n document.addEventListener("message", handleIncoming);\n <\/script>\n </body>\n</html>'},[s.webClientUrl]),p=o(function(n,e){var a;null==(a=l.current)||a.postMessage(JSON.stringify({type:n,payload:e}))},[]),g=o(function(){c||(console.log("Initializing conversation bridge with options:",n),p("INIT",r({},n)),u(!0))},[c,n,p]),E=o(function(){console.log("WebView loaded, initializing bridge..."),g()},[g]),h=o(function(n){var e=null;try{e=JSON.parse(n.nativeEvent.data)}catch(n){return}if(e)switch(e.type){case"STATUS_CHANGE":null==i.onStatusChange||i.onStatusChange(e.payload);break;case"CONNECT":null==i.onConnect||i.onConnect(e.payload);break;case"DISCONNECT":null==i.onDisconnect||i.onDisconnect(e.payload);break;case"MESSAGE":null==i.onMessage||i.onMessage(e.payload);break;case"SUGGESTIONS":null==i.onSuggestions||i.onSuggestions(e.payload);break;case"SPEAKING_START":null==i.onSpeakingStart||i.onSpeakingStart();break;case"SPEAKING_END":null==i.onSpeakingEnd||i.onSpeakingEnd();break;case"TIMEOUT_WARNING":null==i.onTimeoutWarning||i.onTimeoutWarning();break;case"TIMEOUT":null==i.onTimeout||i.onTimeout();break;case"KEEP_SESSION":null==i.onKeepSession||i.onKeepSession(e.payload);break;case"MUTE_STATUS":null==i.onMuteStatusChange||i.onMuteStatusChange(e.payload);break;case"MIC_STATUS":null==i.onMicrophoneStatusChange||i.onMicrophoneStatusChange(e.payload);break;case"MIC_TRANSCRIPT":null==i.onMicrophoneSpeechRecognitionResult||i.onMicrophoneSpeechRecognitionResult(e.payload);break;case"MIC_ERROR":null==i.onMicrophoneError||i.onMicrophoneError(e.payload);break;case"ERROR":null==i.onError||i.onError(e.payload)}},[i]);return{webViewRef:l,webViewProps:{source:{html:S},onMessage:h,onLoadEnd:E,javaScriptEnabled:!0,mediaPlaybackRequiresUserAction:!1,allowsInlineMediaPlayback:!0,allowsFileAccess:!0,domStorageEnabled:!0,mediaCapturePermissionGrantType:"grant"},initialized:c,startSession:function(){return p("START_SESSION")},endSession:function(){return p("END_SESSION")},sendMessage:function(n){return p("SEND_MESSAGE",{text:n})},toggleMicrophone:function(){return p("TOGGLE_MIC")},toggleMute:function(){return p("TOGGLE_MUTE")},keepSession:function(){return p("KEEP_SESSION")}}}(n,i,s)}function d(e){var a=e.style,t=e.webviewProps,o=e.webClientUrl,d=l(e.options,function(n,e){if(null==n)return{};var a={};for(var t in n)if({}.hasOwnProperty.call(n,t)){if(-1!==e.indexOf(t))continue;a[t]=n[t]}return a}(e,s),{webClientUrl:o});/*#__PURE__*/return n.createElement(i,r({ref:d.webViewRef},d.webViewProps,t,{style:a}))}export{d as UnithConversationView,l as useConversation};
+ import n,{useRef as e,useState as a,useMemo as t,useCallback as o}from"react";import{WebView as i}from"react-native-webview";function r(){return r=Object.assign?Object.assign.bind():function(n){for(var e=1;e<arguments.length;e++){var a=arguments[e];for(var t in a)({}).hasOwnProperty.call(a,t)&&(n[t]=a[t])}return n},r.apply(null,arguments)}var s=["options","style","webviewProps","webClientUrl"];function l(n,i,s){return function(n,i,s){void 0===i&&(i={}),void 0===s&&(s={});var l=e(null),d=e(!1),c=a(!1),u=c[0],p=c[1],S=t(function(){return'<!doctype html>\n<html>\n <head>\n <meta charset="utf-8" />\n <meta name="viewport" content="width=device-width, initial-scale=1" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id="root"></div>\n <script type="module">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => {\n console.log("Applying message:", msg);\n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === "INIT") {\n try {\n const { Conversation } = await import("'+(s.webClientUrl||"https://unpkg.com/@unith-ai/core-client/dist/lib.module.js")+'");\n\n if (!("VideoDecoder" in window)) {\n send("ERROR", {\n message: "WebCodecs VideoDecoder is not supported on this device.",\n type: "modal",\n endConversation: true,\n });\n return;\n }\n\n const element = document.getElementById("root");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send("STATUS_CHANGE", data),\n onConnect: (data) => send("CONNECT", data),\n onDisconnect: (data) => send("DISCONNECT", data),\n onMessage: (data) => send("MESSAGE", data),\n onSuggestions: (data) => send("SUGGESTIONS", data),\n onSpeakingStart: () => send("SPEAKING_START"),\n onSpeakingEnd: () => send("SPEAKING_END"),\n onTimeoutWarning: () => send("TIMEOUT_WARNING"),\n onTimeout: () => send("TIMEOUT"),\n onKeepSession: (data) => send("KEEP_SESSION", data),\n onMuteStatusChange: (data) => send("MUTE_STATUS", data),\n onError: (data) => send("ERROR", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send("MIC_ERROR", data),\n onMicrophoneStatusChange: (data) => send("MIC_STATUS", data),\n onMicrophoneSpeechRecognitionResult: (data) => send("MIC_TRANSCRIPT", data),\n },\n });\n\n ready = true;\n send("READY");\n\n while (queue.length > 0) {\n const next = queue.shift();\n try {\n await applyMessage(next);\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Failed to process queued message",\n type: "notification",\n endConversation: false,\n });\n }\n }\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Failed to initialize conversation",\n type: "modal",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n try {\n switch (type) {\n case "START_SESSION":\n await conversation.startSession();\n break;\n case "END_SESSION":\n await conversation.endSession();\n break;\n case "SEND_MESSAGE":\n await conversation.sendMessage(payload?.text || "");\n break;\n case "TOGGLE_MIC":\n await conversation.toggleMicrophone();\n break;\n case "TOGGLE_MUTE":\n await conversation.toggleMute();\n break;\n case "KEEP_SESSION":\n await 
conversation.keepSession();\n break;\n default:\n break;\n }\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Operation failed",\n type: "notification",\n endConversation: false,\n });\n }\n };\n\n const handleIncoming = (event) => {\n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== "INIT") {\n queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (error) {\n console.error("Failed to handle incoming message:", error);\n }\n };\n\n window.addEventListener("message", handleIncoming);\n document.addEventListener("message", handleIncoming);\n <\/script>\n </body>\n</html>'},[s.webClientUrl]),g=o(function(n,e){var a,t={type:n,payload:e};console.log("Posting message to WebView:",n,e),null==(a=l.current)||a.postMessage(JSON.stringify(t))},[]),E=o(function(){d.current?console.log("Already initialized, skipping..."):(console.log("WebView loaded, initializing bridge..."),console.log("Initializing conversation bridge with options:",n),d.current=!0,p(!0),setTimeout(function(){g("INIT",r({},n))},100))},[n,g]),y=o(function(n){var e=null;try{e=JSON.parse(n.nativeEvent.data)}catch(n){return}if(e)switch(e.type){case"STATUS_CHANGE":null==i.onStatusChange||i.onStatusChange(e.payload);break;case"CONNECT":null==i.onConnect||i.onConnect(e.payload);break;case"DISCONNECT":null==i.onDisconnect||i.onDisconnect(e.payload);break;case"MESSAGE":null==i.onMessage||i.onMessage(e.payload);break;case"SUGGESTIONS":null==i.onSuggestions||i.onSuggestions(e.payload);break;case"SPEAKING_START":null==i.onSpeakingStart||i.onSpeakingStart();break;case"SPEAKING_END":null==i.onSpeakingEnd||i.onSpeakingEnd();break;case"TIMEOUT_WARNING":null==i.onTimeoutWarning||i.onTimeoutWarning();break;case"TIMEOUT":null==i.onTimeout||i.onTimeout();break;case"KEEP_SESSION":null==i.onKeepSession||i.onKeepSession(e.payload);break;case"MUTE_STATUS":null==i.onMuteStatusChange||i.onMuteStatusChange(e.payload);break;case"MIC_STATUS":null==i.onMicrophoneStatusChange||i.onMicrophoneStatusChange(e.payload);break;case"MIC_TRANSCRIPT":null==i.onMicrophoneSpeechRecognitionResult||i.onMicrophoneSpeechRecognitionResult(e.payload);break;case"MIC_ERROR":null==i.onMicrophoneError||i.onMicrophoneError(e.payload);break;case"ERROR":null==i.onError||i.onError(e.payload)}},[i]);return{webViewRef:l,webViewProps:{source:{html:S},onMessage:y,onLoadEnd:E,javaScriptEnabled:!0,mediaPlaybackRequiresUserAction:!1,allowsInlineMediaPlayback:!0,allowsFileAccess:!0,domStorageEnabled:!0,mediaCapturePermissionGrantType:"grant"},initialized:u,startSession:function(){return g("START_SESSION")},endSession:function(){return g("END_SESSION")},sendMessage:function(n){return g("SEND_MESSAGE",{text:n})},toggleMicrophone:function(){return g("TOGGLE_MIC")},toggleMute:function(){return g("TOGGLE_MUTE")},keepSession:function(){return g("KEEP_SESSION")}}}(n,i,s)}function d(e){var a=e.style,t=e.webviewProps,o=e.webClientUrl,d=l(e.options,function(n,e){if(null==n)return{};var a={};for(var t in n)if({}.hasOwnProperty.call(n,t)){if(-1!==e.indexOf(t))continue;a[t]=n[t]}return a}(e,s),{webClientUrl:o});/*#__PURE__*/return n.createElement(i,r({ref:d.webViewRef},d.webViewProps,t,{style:a}))}export{d as UnithConversationView,l as useConversation};
  //# sourceMappingURL=lib.module.js.map
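
The hunk above is the same change compiled into the legacy lib.module.js build: the ref guard appears as d=e(!1), the state pair as c=a(!1), and the deferred INIT as setTimeout(function(){g("INIT",r({},n))},100). For consumers who prefer the component wrapper this bundle also exports, a hedged sketch with placeholder values; event callbacks spread through as ConversationEvents props:

    import React from "react";
    import { UnithConversationView } from "@unith-ai/react-native";

    // Placeholder options; any ConversationEvents callback can be passed
    // directly as a prop alongside style/webviewProps/webClientUrl.
    export function Demo() {
      return (
        <UnithConversationView
          options={{ orgId: "org_...", headId: "head_...", apiKey: "..." }}
          onSuggestions={({ suggestions }) => console.log(suggestions)}
          onError={({ message }) => console.warn(message)}
          style={{ flex: 1 }}
        />
      );
    }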
@@ -1 +1 @@
- {"version":3,"file":"lib.module.js","sources":["../src/index.tsx"],"sourcesContent":["import React, { useCallback, useMemo, useRef, useState } from \"react\";\nimport { ViewStyle } from \"react-native\";\nimport { WebView, WebViewMessageEvent } from \"react-native-webview\";\n\nexport type ConversationOptions = {\n orgId: string;\n headId: string;\n apiKey: string;\n environment?: string;\n mode?: string;\n language?: string;\n username?: string;\n allowWakeLock?: boolean;\n fadeTransitionsType?: string;\n microphoneProvider?: \"azure\" | \"eleven_labs\" | \"custom\";\n};\n\nexport type ConversationEvents = {\n onStatusChange?: (prop: { status: string }) => void;\n onConnect?: (prop: { userId: string; headInfo: any; microphoneAccess: boolean }) => void;\n onDisconnect?: (prop: any) => void;\n onMessage?: (prop: any) => void;\n onSuggestions?: (prop: { suggestions: string[] }) => void;\n onTimeoutWarning?: () => void;\n onTimeout?: () => void;\n onMuteStatusChange?: (prop: { isMuted: boolean }) => void;\n onSpeakingStart?: () => void;\n onSpeakingEnd?: () => void;\n onKeepSession?: (prop: { granted: boolean }) => void;\n onError?: (prop: { message: string; endConversation: boolean; type: string }) => void;\n onMicrophoneError?: (prop: { message: string }) => void;\n onMicrophoneStatusChange?: (prop: { status: \"ON\" | \"OFF\" | \"PROCESSING\" }) => void;\n onMicrophoneSpeechRecognitionResult?: (prop: { transcript: string }) => void;\n};\n\nexport type BridgeOptions = {\n webClientUrl?: string;\n};\n\nexport type UseConversationResult = {\n webViewRef: React.RefObject<any>;\n webViewProps: Record<string, any>;\n initialized: boolean;\n startSession: () => void;\n endSession: () => void;\n sendMessage: (text: string) => void;\n toggleMicrophone: () => void;\n toggleMute: () => void;\n keepSession: () => void;\n};\n\ntype BridgeMessage = {\n type: string;\n payload?: any;\n};\n\nconst DEFAULT_WEB_CLIENT_URL =\n \"https://unpkg.com/@unith-ai/core-client/dist/lib.module.js\";\n\nfunction buildHtml(webClientUrl: string) {\n return `<!doctype html>\n<html>\n <head>\n <meta charset=\"utf-8\" />\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id=\"root\"></div>\n <script type=\"module\">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => {\n console.log(\"Applying message from React Native:\", msg);\n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === \"INIT\") {\n try {\n const { Conversation } = await import(\"${webClientUrl}\");\n\n if (!(\"VideoDecoder\" in window)) {\n send(\"ERROR\", {\n message: \"WebCodecs VideoDecoder is not supported on this device.\",\n type: \"modal\",\n endConversation: true,\n });\n return;\n }\n\n const element = document.getElementById(\"root\");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send(\"STATUS_CHANGE\", data),\n onConnect: (data) => send(\"CONNECT\", data),\n onDisconnect: (data) => send(\"DISCONNECT\", data),\n onMessage: (data) => send(\"MESSAGE\", data),\n onSuggestions: (data) => send(\"SUGGESTIONS\", data),\n 
onSpeakingStart: () => send(\"SPEAKING_START\"),\n onSpeakingEnd: () => send(\"SPEAKING_END\"),\n onTimeoutWarning: () => send(\"TIMEOUT_WARNING\"),\n onTimeout: () => send(\"TIMEOUT\"),\n onKeepSession: (data) => send(\"KEEP_SESSION\", data),\n onMuteStatusChange: (data) => send(\"MUTE_STATUS\", data),\n onError: (data) => send(\"ERROR\", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send(\"MIC_ERROR\", data),\n onMicrophoneStatusChange: (data) => send(\"MIC_STATUS\", data),\n onMicrophoneSpeechRecognitionResult: (data) => send(\"MIC_TRANSCRIPT\", data),\n },\n });\n\n ready = true;\n send(\"READY\");\n\n while (queue.length > 0) {\n const next = queue.shift();\n try {\n await applyMessage(next);\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to process queued message\",\n type: \"notification\",\n endConversation: false,\n });\n }\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to initialize conversation\",\n type: \"modal\",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n try {\n switch (type) {\n case \"START_SESSION\":\n await conversation.startSession();\n break;\n case \"END_SESSION\":\n await conversation.endSession();\n break;\n case \"SEND_MESSAGE\":\n await conversation.sendMessage(payload?.text || \"\");\n break;\n case \"TOGGLE_MIC\":\n await conversation.toggleMicrophone();\n break;\n case \"TOGGLE_MUTE\":\n await conversation.toggleMute();\n break;\n case \"KEEP_SESSION\":\n await conversation.keepSession();\n break;\n default:\n break;\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Operation failed\",\n type: \"notification\",\n endConversation: false,\n });\n }\n };\n\n const handleIncoming = (event) => {\n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== \"INIT\") {\n queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (error) {\n console.error(\"Failed to handle incoming message:\", error);\n }\n };\n\n window.addEventListener(\"message\", handleIncoming);\n document.addEventListener(\"message\", handleIncoming);\n </script>\n </body>\n</html>`;\n}\n\nfunction useBridge(\n options: ConversationOptions,\n events: ConversationEvents = {},\n bridge: BridgeOptions = {}\n): UseConversationResult {\n const webViewRef = useRef<any>(null);\n const [initialized, setInitialized] = useState(false);\n\n const html = useMemo(\n () => buildHtml(bridge.webClientUrl || DEFAULT_WEB_CLIENT_URL),\n [bridge.webClientUrl]\n );\n\n const post = useCallback((type: string, payload?: any) => {\n const message: BridgeMessage = { type, payload };\n webViewRef.current?.postMessage(JSON.stringify(message));\n }, []);\n\n const init = useCallback(() => {\n if (initialized) return;\n console.log(\"Initializing conversation bridge with options:\", options);\n post(\"INIT\", {\n ...options,\n });\n setInitialized(true);\n }, [initialized, options, post]);\n\n const handleLoadEnd = useCallback(() => {\n console.log(\"WebView loaded, initializing bridge...\");\n init();\n }, [init]);\n\n const onMessage = useCallback(\n (event: WebViewMessageEvent) => {\n let data: BridgeMessage | null = null;\n try {\n data = JSON.parse(event.nativeEvent.data);\n } catch (_) {\n return;\n }\n if (!data) return;\n\n switch (data.type) {\n case \"STATUS_CHANGE\":\n events.onStatusChange?.(data.payload);\n break;\n case \"CONNECT\":\n events.onConnect?.(data.payload);\n break;\n case \"DISCONNECT\":\n events.onDisconnect?.(data.payload);\n 
break;\n case \"MESSAGE\":\n events.onMessage?.(data.payload);\n break;\n case \"SUGGESTIONS\":\n events.onSuggestions?.(data.payload);\n break;\n case \"SPEAKING_START\":\n events.onSpeakingStart?.();\n break;\n case \"SPEAKING_END\":\n events.onSpeakingEnd?.();\n break;\n case \"TIMEOUT_WARNING\":\n events.onTimeoutWarning?.();\n break;\n case \"TIMEOUT\":\n events.onTimeout?.();\n break;\n case \"KEEP_SESSION\":\n events.onKeepSession?.(data.payload);\n break;\n case \"MUTE_STATUS\":\n events.onMuteStatusChange?.(data.payload);\n break;\n case \"MIC_STATUS\":\n events.onMicrophoneStatusChange?.(data.payload);\n break;\n case \"MIC_TRANSCRIPT\":\n events.onMicrophoneSpeechRecognitionResult?.(data.payload);\n break;\n case \"MIC_ERROR\":\n events.onMicrophoneError?.(data.payload);\n break;\n case \"ERROR\":\n events.onError?.(data.payload);\n break;\n default:\n break;\n }\n },\n [events]\n );\n\n return {\n webViewRef,\n webViewProps: {\n source: { html },\n onMessage,\n onLoadEnd: handleLoadEnd,\n javaScriptEnabled: true,\n mediaPlaybackRequiresUserAction: false,\n allowsInlineMediaPlayback: true,\n allowsFileAccess: true,\n domStorageEnabled: true,\n mediaCapturePermissionGrantType: 'grant',\n },\n initialized,\n startSession: () => post(\"START_SESSION\"),\n endSession: () => post(\"END_SESSION\"),\n sendMessage: (text: string) => post(\"SEND_MESSAGE\", { text }),\n toggleMicrophone: () => post(\"TOGGLE_MIC\"),\n toggleMute: () => post(\"TOGGLE_MUTE\"),\n keepSession: () => post(\"KEEP_SESSION\"),\n };\n}\n\nexport function useConversation(\n options: ConversationOptions,\n events?: ConversationEvents,\n bridge?: BridgeOptions\n): UseConversationResult {\n return useBridge(options, events, bridge);\n}\n\nexport type UnithConversationViewProps = ConversationEvents & {\n options: ConversationOptions;\n style?: ViewStyle;\n webviewProps?: Record<string, any>;\n webClientUrl?: string;\n};\n\nexport function UnithConversationView({\n options,\n style,\n webviewProps,\n webClientUrl,\n ...events\n}: UnithConversationViewProps) {\n const convo = useConversation(options, events, { webClientUrl });\n\n return (\n <WebView\n ref={convo.webViewRef}\n {...convo.webViewProps}\n {...webviewProps}\n style={style}\n />\n 
);\n}"],"names":["useConversation","options","events","bridge","webViewRef","useRef","_useState","useState","initialized","setInitialized","html","useMemo","webClientUrl","post","useCallback","type","payload","_webViewRef$current","current","postMessage","JSON","stringify","init","console","log","_extends","handleLoadEnd","onMessage","event","data","parse","nativeEvent","_","onStatusChange","onConnect","onDisconnect","onSuggestions","onSpeakingStart","onSpeakingEnd","onTimeoutWarning","onTimeout","onKeepSession","onMuteStatusChange","onMicrophoneStatusChange","onMicrophoneSpeechRecognitionResult","onMicrophoneError","onError","webViewProps","source","onLoadEnd","javaScriptEnabled","mediaPlaybackRequiresUserAction","allowsInlineMediaPlayback","allowsFileAccess","domStorageEnabled","mediaCapturePermissionGrantType","startSession","endSession","sendMessage","text","toggleMicrophone","toggleMute","keepSession","useBridge","UnithConversationView","_ref","style","webviewProps","convo","_objectWithoutPropertiesLoose","_excluded","React","WebView","ref"],"mappings":"6YAiUgB,SAAAA,EACdC,EACAC,EACAC,GAEA,OA3HF,SACEF,EACAC,EACAC,QADAD,IAAAA,IAAAA,EAA6B,CAAE,YAC/BC,IAAAA,EAAwB,IAExB,IAAMC,EAAaC,EAAY,MAC/BC,EAAsCC,GAAS,GAAxCC,EAAWF,EAAEG,GAAAA,EAAcH,EAAA,GAE5BI,EAAOC,EACX,WAAM,89BAAUR,EAAOS,cA3JzB,8DAiC+D,wwHA0HC,EAC9D,CAACT,EAAOS,eAGJC,EAAOC,EAAY,SAACC,EAAcC,GAAiBC,IAAAA,EAErC,OAAlBA,EAAAb,EAAWc,UAAXD,EAAoBE,YAAYC,KAAKC,UADN,CAAEN,KAAAA,EAAMC,QAAAA,IAEzC,EAAG,IAEGM,EAAOR,EAAY,WACnBN,IACJe,QAAQC,IAAI,iDAAkDvB,GAC9DY,EAAK,OAAMY,KACNxB,IAELQ,GAAe,GACjB,EAAG,CAACD,EAAaP,EAASY,IAEpBa,EAAgBZ,EAAY,WAChCS,QAAQC,IAAI,0CACZF,GACF,EAAG,CAACA,IAEEK,EAAYb,EAChB,SAACc,GACC,IAAIC,EAA6B,KACjC,IACEA,EAAOT,KAAKU,MAAMF,EAAMG,YAAYF,KACtC,CAAE,MAAOG,GACP,MACF,CACA,GAAKH,EAEL,OAAQA,EAAKd,MACX,IAAK,sBACHb,EAAO+B,gBAAP/B,EAAO+B,eAAiBJ,EAAKb,SAC7B,MACF,IAAK,UACHd,MAAAA,EAAOgC,WAAPhC,EAAOgC,UAAYL,EAAKb,SACxB,MACF,IAAK,mBACHd,EAAOiC,cAAPjC,EAAOiC,aAAeN,EAAKb,SAC3B,MACF,IAAK,UACa,MAAhBd,EAAOyB,WAAPzB,EAAOyB,UAAYE,EAAKb,SACxB,MACF,IAAK,oBACHd,EAAOkC,eAAPlC,EAAOkC,cAAgBP,EAAKb,SAC5B,MACF,IAAK,iBACmB,MAAtBd,EAAOmC,iBAAPnC,EAAOmC,kBACP,MACF,IAAK,eACHnC,MAAAA,EAAOoC,eAAPpC,EAAOoC,gBACP,MACF,IAAK,kBACoB,MAAvBpC,EAAOqC,kBAAPrC,EAAOqC,mBACP,MACF,IAAK,UACHrC,MAAAA,EAAOsC,WAAPtC,EAAOsC,YACP,MACF,IAAK,qBACHtC,EAAOuC,eAAPvC,EAAOuC,cAAgBZ,EAAKb,SAC5B,MACF,IAAK,cACsB,MAAzBd,EAAOwC,oBAAPxC,EAAOwC,mBAAqBb,EAAKb,SACjC,MACF,IAAK,aACHd,MAAAA,EAAOyC,0BAAPzC,EAAOyC,yBAA2Bd,EAAKb,SACvC,MACF,IAAK,uBACHd,EAAO0C,qCAAP1C,EAAO0C,oCAAsCf,EAAKb,SAClD,MACF,IAAK,YACHd,MAAAA,EAAO2C,mBAAP3C,EAAO2C,kBAAoBhB,EAAKb,SAChC,MACF,IAAK,cACHd,EAAO4C,SAAP5C,EAAO4C,QAAUjB,EAAKb,SAK5B,EACA,CAACd,IAGH,MAAO,CACLE,WAAAA,EACA2C,aAAc,CACZC,OAAQ,CAAEtC,KAAAA,GACViB,UAAAA,EACAsB,UAAWvB,EACXwB,mBAAmB,EACnBC,iCAAiC,EACjCC,2BAA2B,EAC3BC,kBAAkB,EAClBC,mBAAmB,EACnBC,gCAAiC,SAEnC/C,YAAAA,EACAgD,aAAc,WAAM,OAAA3C,EAAK,gBAAgB,EACzC4C,WAAY,WAAF,OAAQ5C,EAAK,cAAc,EACrC6C,YAAa,SAACC,GAAiB,OAAA9C,EAAK,eAAgB,CAAE8C,KAAAA,GAAO,EAC7DC,iBAAkB,WAAM,OAAA/C,EAAK,aAAa,EAC1CgD,WAAY,WAAF,OAAQhD,EAAK,cAAc,EACrCiD,YAAa,kBAAMjD,EAAK,eAAe,EAE3C,CAOSkD,CAAU9D,EAASC,EAAQC,EACpC,CASgB,SAAA6D,EAAqBC,GAMR,IAJ3BC,EAAKD,EAALC,MACAC,EAAYF,EAAZE,aACAvD,EAAYqD,EAAZrD,aAGMwD,EAAQpE,EANPiE,EAAPhE,mJAISoE,CAAAJ,EAAAK,GAEsC,CAAE1D,aAAAA,iBAEjD,OACE2D,gBAACC,EAAO/C,GACNgD,IAAKL,EAAMhE,YACPgE,EAAMrB,aACNoB,GACJD,MAAOA,IAGb"}
+ {"version":3,"file":"lib.module.js","sources":["../src/index.tsx"],"sourcesContent":["import React, { useCallback, useMemo, useRef, useState } from \"react\";\nimport { ViewStyle } from \"react-native\";\nimport { WebView, WebViewMessageEvent } from \"react-native-webview\";\n\nexport type ConversationOptions = {\n orgId: string;\n headId: string;\n apiKey: string;\n environment?: string;\n mode?: string;\n language?: string;\n username?: string;\n allowWakeLock?: boolean;\n fadeTransitionsType?: string;\n microphoneProvider?: \"azure\" | \"eleven_labs\" | \"custom\";\n};\n\nexport type ConversationEvents = {\n onStatusChange?: (prop: { status: string }) => void;\n onConnect?: (prop: { userId: string; headInfo: any; microphoneAccess: boolean }) => void;\n onDisconnect?: (prop: any) => void;\n onMessage?: (prop: any) => void;\n onSuggestions?: (prop: { suggestions: string[] }) => void;\n onTimeoutWarning?: () => void;\n onTimeout?: () => void;\n onMuteStatusChange?: (prop: { isMuted: boolean }) => void;\n onSpeakingStart?: () => void;\n onSpeakingEnd?: () => void;\n onKeepSession?: (prop: { granted: boolean }) => void;\n onError?: (prop: { message: string; endConversation: boolean; type: string }) => void;\n onMicrophoneError?: (prop: { message: string }) => void;\n onMicrophoneStatusChange?: (prop: { status: \"ON\" | \"OFF\" | \"PROCESSING\" }) => void;\n onMicrophoneSpeechRecognitionResult?: (prop: { transcript: string }) => void;\n};\n\nexport type BridgeOptions = {\n webClientUrl?: string;\n};\n\nexport type UseConversationResult = {\n webViewRef: React.RefObject<any>;\n webViewProps: Record<string, any>;\n initialized: boolean;\n startSession: () => void;\n endSession: () => void;\n sendMessage: (text: string) => void;\n toggleMicrophone: () => void;\n toggleMute: () => void;\n keepSession: () => void;\n};\n\ntype BridgeMessage = {\n type: string;\n payload?: any;\n};\n\nconst DEFAULT_WEB_CLIENT_URL =\n \"https://unpkg.com/@unith-ai/core-client/dist/lib.module.js\";\n\nfunction buildHtml(webClientUrl: string) {\n return `<!doctype html>\n<html>\n <head>\n <meta charset=\"utf-8\" />\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id=\"root\"></div>\n <script type=\"module\">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => {\n console.log(\"Applying message:\", msg);\n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === \"INIT\") {\n try {\n const { Conversation } = await import(\"${webClientUrl}\");\n\n if (!(\"VideoDecoder\" in window)) {\n send(\"ERROR\", {\n message: \"WebCodecs VideoDecoder is not supported on this device.\",\n type: \"modal\",\n endConversation: true,\n });\n return;\n }\n\n const element = document.getElementById(\"root\");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send(\"STATUS_CHANGE\", data),\n onConnect: (data) => send(\"CONNECT\", data),\n onDisconnect: (data) => send(\"DISCONNECT\", data),\n onMessage: (data) => send(\"MESSAGE\", data),\n onSuggestions: (data) => send(\"SUGGESTIONS\", data),\n onSpeakingStart: () => 
send(\"SPEAKING_START\"),\n onSpeakingEnd: () => send(\"SPEAKING_END\"),\n onTimeoutWarning: () => send(\"TIMEOUT_WARNING\"),\n onTimeout: () => send(\"TIMEOUT\"),\n onKeepSession: (data) => send(\"KEEP_SESSION\", data),\n onMuteStatusChange: (data) => send(\"MUTE_STATUS\", data),\n onError: (data) => send(\"ERROR\", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send(\"MIC_ERROR\", data),\n onMicrophoneStatusChange: (data) => send(\"MIC_STATUS\", data),\n onMicrophoneSpeechRecognitionResult: (data) => send(\"MIC_TRANSCRIPT\", data),\n },\n });\n\n ready = true;\n send(\"READY\");\n\n while (queue.length > 0) {\n const next = queue.shift();\n try {\n await applyMessage(next);\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to process queued message\",\n type: \"notification\",\n endConversation: false,\n });\n }\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to initialize conversation\",\n type: \"modal\",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n try {\n switch (type) {\n case \"START_SESSION\":\n await conversation.startSession();\n break;\n case \"END_SESSION\":\n await conversation.endSession();\n break;\n case \"SEND_MESSAGE\":\n await conversation.sendMessage(payload?.text || \"\");\n break;\n case \"TOGGLE_MIC\":\n await conversation.toggleMicrophone();\n break;\n case \"TOGGLE_MUTE\":\n await conversation.toggleMute();\n break;\n case \"KEEP_SESSION\":\n await conversation.keepSession();\n break;\n default:\n break;\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Operation failed\",\n type: \"notification\",\n endConversation: false,\n });\n }\n };\n\n const handleIncoming = (event) => {\n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== \"INIT\") {\n queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (error) {\n console.error(\"Failed to handle incoming message:\", error);\n }\n };\n\n window.addEventListener(\"message\", handleIncoming);\n document.addEventListener(\"message\", handleIncoming);\n </script>\n </body>\n</html>`;\n}\n\nfunction useBridge(\n options: ConversationOptions,\n events: ConversationEvents = {},\n bridge: BridgeOptions = {}\n): UseConversationResult {\n const webViewRef = useRef<any>(null);\n const initializedRef = useRef(false);\n const [initialized, setInitialized] = useState(false);\n\n const html = useMemo(\n () => buildHtml(bridge.webClientUrl || DEFAULT_WEB_CLIENT_URL),\n [bridge.webClientUrl]\n );\n\n const post = useCallback((type: string, payload?: any) => {\n const message: BridgeMessage = { type, payload };\n console.log(\"Posting message to WebView:\", type, payload);\n webViewRef.current?.postMessage(JSON.stringify(message));\n }, []);\n\n const handleLoadEnd = useCallback(() => {\n if (initializedRef.current) {\n console.log(\"Already initialized, skipping...\");\n return;\n }\n\n console.log(\"WebView loaded, initializing bridge...\");\n console.log(\"Initializing conversation bridge with options:\", options);\n\n initializedRef.current = true;\n setInitialized(true);\n\n // Add a small delay to ensure WebView is ready to receive messages\n setTimeout(() => {\n post(\"INIT\", { ...options });\n }, 100);\n }, [options, post]);\n\n const onMessage = useCallback(\n (event: WebViewMessageEvent) => {\n let data: BridgeMessage | null = null;\n try {\n data = JSON.parse(event.nativeEvent.data);\n } catch (_) {\n return;\n }\n if (!data) return;\n\n switch 
(data.type) {\n case \"STATUS_CHANGE\":\n events.onStatusChange?.(data.payload);\n break;\n case \"CONNECT\":\n events.onConnect?.(data.payload);\n break;\n case \"DISCONNECT\":\n events.onDisconnect?.(data.payload);\n break;\n case \"MESSAGE\":\n events.onMessage?.(data.payload);\n break;\n case \"SUGGESTIONS\":\n events.onSuggestions?.(data.payload);\n break;\n case \"SPEAKING_START\":\n events.onSpeakingStart?.();\n break;\n case \"SPEAKING_END\":\n events.onSpeakingEnd?.();\n break;\n case \"TIMEOUT_WARNING\":\n events.onTimeoutWarning?.();\n break;\n case \"TIMEOUT\":\n events.onTimeout?.();\n break;\n case \"KEEP_SESSION\":\n events.onKeepSession?.(data.payload);\n break;\n case \"MUTE_STATUS\":\n events.onMuteStatusChange?.(data.payload);\n break;\n case \"MIC_STATUS\":\n events.onMicrophoneStatusChange?.(data.payload);\n break;\n case \"MIC_TRANSCRIPT\":\n events.onMicrophoneSpeechRecognitionResult?.(data.payload);\n break;\n case \"MIC_ERROR\":\n events.onMicrophoneError?.(data.payload);\n break;\n case \"ERROR\":\n events.onError?.(data.payload);\n break;\n default:\n break;\n }\n },\n [events]\n );\n\n return {\n webViewRef,\n webViewProps: {\n source: { html },\n onMessage,\n onLoadEnd: handleLoadEnd,\n javaScriptEnabled: true,\n mediaPlaybackRequiresUserAction: false,\n allowsInlineMediaPlayback: true,\n allowsFileAccess: true,\n domStorageEnabled: true,\n mediaCapturePermissionGrantType: 'grant',\n },\n initialized,\n startSession: () => post(\"START_SESSION\"),\n endSession: () => post(\"END_SESSION\"),\n sendMessage: (text: string) => post(\"SEND_MESSAGE\", { text }),\n toggleMicrophone: () => post(\"TOGGLE_MIC\"),\n toggleMute: () => post(\"TOGGLE_MUTE\"),\n keepSession: () => post(\"KEEP_SESSION\"),\n };\n}\n\nexport function useConversation(\n options: ConversationOptions,\n events?: ConversationEvents,\n bridge?: BridgeOptions\n): UseConversationResult {\n return useBridge(options, events, bridge);\n}\n\nexport type UnithConversationViewProps = ConversationEvents & {\n options: ConversationOptions;\n style?: ViewStyle;\n webviewProps?: Record<string, any>;\n webClientUrl?: string;\n};\n\nexport function UnithConversationView({\n options,\n style,\n webviewProps,\n webClientUrl,\n ...events\n}: UnithConversationViewProps) {\n const convo = useConversation(options, events, { webClientUrl });\n\n return (\n <WebView\n ref={convo.webViewRef}\n {...convo.webViewProps}\n {...webviewProps}\n style={style}\n />\n 
);\n}"],"names":["useConversation","options","events","bridge","webViewRef","useRef","initializedRef","_useState","useState","initialized","setInitialized","html","useMemo","webClientUrl","post","useCallback","type","payload","_webViewRef$current","message","console","log","current","postMessage","JSON","stringify","handleLoadEnd","setTimeout","_extends","onMessage","event","data","parse","nativeEvent","_","onStatusChange","onConnect","onDisconnect","onSuggestions","onSpeakingStart","onSpeakingEnd","onTimeoutWarning","onTimeout","onKeepSession","onMuteStatusChange","onMicrophoneStatusChange","onMicrophoneSpeechRecognitionResult","onMicrophoneError","onError","webViewProps","source","onLoadEnd","javaScriptEnabled","mediaPlaybackRequiresUserAction","allowsInlineMediaPlayback","allowsFileAccess","domStorageEnabled","mediaCapturePermissionGrantType","startSession","endSession","sendMessage","text","toggleMicrophone","toggleMute","keepSession","useBridge","UnithConversationView","_ref","style","webviewProps","convo","_objectWithoutPropertiesLoose","_excluded","React","WebView","ref"],"mappings":"6YAuUgB,SAAAA,EACdC,EACAC,EACAC,GAEA,OAjIF,SACEF,EACAC,EACAC,YADAD,IAAAA,EAA6B,CAAA,YAC7BC,IAAAA,EAAwB,CAAA,GAExB,IAAMC,EAAaC,EAAY,MACzBC,EAAiBD,GAAO,GAC9BE,EAAsCC,GAAS,GAAxCC,EAAWF,EAAEG,GAAAA,EAAcH,EAAA,GAE5BI,EAAOC,EACX,WAAA,MAzJF,s8BAyJkBT,EAAOU,cA5JzB,8DAgJF,wwHAYkE,EAC9D,CAACV,EAAOU,eAGJC,EAAOC,EAAY,SAACC,EAAcC,OAAiBC,EACjDC,EAAyB,CAAEH,KAAAA,EAAMC,QAAAA,GACvCG,QAAQC,IAAI,8BAA+BL,EAAMC,UACjDC,EAAAd,EAAWkB,UAAXJ,EAAoBK,YAAYC,KAAKC,UAAUN,GACjD,EAAG,IAEGO,EAAgBX,EAAY,WAC5BT,EAAegB,QACjBF,QAAQC,IAAI,qCAIdD,QAAQC,IAAI,0CACZD,QAAQC,IAAI,iDAAkDpB,GAE9DK,EAAegB,SAAU,EACzBZ,GAAe,GAGfiB,WAAW,WACTb,EAAK,OAAMc,KAAO3B,GACpB,EAAG,KACL,EAAG,CAACA,EAASa,IAEPe,EAAYd,EAChB,SAACe,GACC,IAAIC,EAA6B,KACjC,IACEA,EAAOP,KAAKQ,MAAMF,EAAMG,YAAYF,KACtC,CAAE,MAAOG,GACP,MACF,CACA,GAAKH,EAEL,OAAQA,EAAKf,MACX,IAAK,sBACHd,EAAOiC,gBAAPjC,EAAOiC,eAAiBJ,EAAKd,SAC7B,MACF,IAAK,UACHf,MAAAA,EAAOkC,WAAPlC,EAAOkC,UAAYL,EAAKd,SACxB,MACF,IAAK,mBACHf,EAAOmC,cAAPnC,EAAOmC,aAAeN,EAAKd,SAC3B,MACF,IAAK,UACHf,MAAAA,EAAO2B,WAAP3B,EAAO2B,UAAYE,EAAKd,SACxB,MACF,IAAK,cACiB,MAApBf,EAAOoC,eAAPpC,EAAOoC,cAAgBP,EAAKd,SAC5B,MACF,IAAK,uBACHf,EAAOqC,iBAAPrC,EAAOqC,kBACP,MACF,IAAK,eACHrC,MAAAA,EAAOsC,eAAPtC,EAAOsC,gBACP,MACF,IAAK,kBACHtC,MAAAA,EAAOuC,kBAAPvC,EAAOuC,mBACP,MACF,IAAK,UACHvC,MAAAA,EAAOwC,WAAPxC,EAAOwC,YACP,MACF,IAAK,eACiB,MAApBxC,EAAOyC,eAAPzC,EAAOyC,cAAgBZ,EAAKd,SAC5B,MACF,IAAK,cACHf,MAAAA,EAAO0C,oBAAP1C,EAAO0C,mBAAqBb,EAAKd,SACjC,MACF,IAAK,aAC4B,MAA/Bf,EAAO2C,0BAAP3C,EAAO2C,yBAA2Bd,EAAKd,SACvC,MACF,IAAK,iBACuC,MAA1Cf,EAAO4C,qCAAP5C,EAAO4C,oCAAsCf,EAAKd,SAClD,MACF,IAAK,YACHf,MAAAA,EAAO6C,mBAAP7C,EAAO6C,kBAAoBhB,EAAKd,SAChC,MACF,IAAK,QACW,MAAdf,EAAO8C,SAAP9C,EAAO8C,QAAUjB,EAAKd,SAK5B,EACA,CAACf,IAGH,MAAO,CACLE,WAAAA,EACA6C,aAAc,CACZC,OAAQ,CAAEvC,KAAAA,GACVkB,UAAAA,EACAsB,UAAWzB,EACX0B,mBAAmB,EACnBC,iCAAiC,EACjCC,2BAA2B,EAC3BC,kBAAkB,EAClBC,mBAAmB,EACnBC,gCAAiC,SAEnChD,YAAAA,EACAiD,aAAc,WAAM,OAAA5C,EAAK,gBAAgB,EACzC6C,WAAY,WAAM,OAAA7C,EAAK,cAAc,EACrC8C,YAAa,SAACC,UAAiB/C,EAAK,eAAgB,CAAE+C,KAAAA,GAAO,EAC7DC,iBAAkB,kBAAMhD,EAAK,aAAa,EAC1CiD,WAAY,WAAF,OAAQjD,EAAK,cAAc,EACrCkD,YAAa,WAAF,OAAQlD,EAAK,eAAe,EAE3C,CAOSmD,CAAUhE,EAASC,EAAQC,EACpC,CASgB,SAAA+D,EAAqBC,GACnC,IACAC,EAAKD,EAALC,MACAC,EAAYF,EAAZE,aACAxD,EAAYsD,EAAZtD,aAGMyD,EAAQtE,EANPmE,EAAPlE,mJAISsE,CAAAJ,EAAAK,GAEsC,CAAE3D,aAAAA,iBAEjD,OACE4D,gBAACC,EAAO9C,EACN+C,CAAAA,IAAKL,EAAMlE,YACPkE,EAAMrB,aACNoB,GACJD,MAAOA,IAGb"}
package/dist/lib.umd.js CHANGED
@@ -1,2 +1,2 @@
- !function(n,e){"object"==typeof exports&&"undefined"!=typeof module?e(exports,require("react"),require("react-native-webview")):"function"==typeof define&&define.amd?define(["exports","react","react-native-webview"],e):e((n||self).reactNative={},n.react,n.reactNativeWebview)}(this,function(n,e,a){function t(n){return n&&"object"==typeof n&&"default"in n?n:{default:n}}var o=/*#__PURE__*/t(e);function i(){return i=Object.assign?Object.assign.bind():function(n){for(var e=1;e<arguments.length;e++){var a=arguments[e];for(var t in a)({}).hasOwnProperty.call(a,t)&&(n[t]=a[t])}return n},i.apply(null,arguments)}var s=["options","style","webviewProps","webClientUrl"];function r(n,a,t){return function(n,a,t){void 0===a&&(a={}),void 0===t&&(t={});var o=e.useRef(null),s=e.useState(!1),r=s[0],l=s[1],c=e.useMemo(function(){return'<!doctype html>\n<html>\n <head>\n <meta charset="utf-8" />\n <meta name="viewport" content="width=device-width, initial-scale=1" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id="root"></div>\n <script type="module">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => {\n console.log("Applying message from React Native:", msg);\n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === "INIT") {\n try {\n const { Conversation } = await import("'+(t.webClientUrl||"https://unpkg.com/@unith-ai/core-client/dist/lib.module.js")+'");\n\n if (!("VideoDecoder" in window)) {\n send("ERROR", {\n message: "WebCodecs VideoDecoder is not supported on this device.",\n type: "modal",\n endConversation: true,\n });\n return;\n }\n\n const element = document.getElementById("root");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send("STATUS_CHANGE", data),\n onConnect: (data) => send("CONNECT", data),\n onDisconnect: (data) => send("DISCONNECT", data),\n onMessage: (data) => send("MESSAGE", data),\n onSuggestions: (data) => send("SUGGESTIONS", data),\n onSpeakingStart: () => send("SPEAKING_START"),\n onSpeakingEnd: () => send("SPEAKING_END"),\n onTimeoutWarning: () => send("TIMEOUT_WARNING"),\n onTimeout: () => send("TIMEOUT"),\n onKeepSession: (data) => send("KEEP_SESSION", data),\n onMuteStatusChange: (data) => send("MUTE_STATUS", data),\n onError: (data) => send("ERROR", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send("MIC_ERROR", data),\n onMicrophoneStatusChange: (data) => send("MIC_STATUS", data),\n onMicrophoneSpeechRecognitionResult: (data) => send("MIC_TRANSCRIPT", data),\n },\n });\n\n ready = true;\n send("READY");\n\n while (queue.length > 0) {\n const next = queue.shift();\n try {\n await applyMessage(next);\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Failed to process queued message",\n type: "notification",\n endConversation: false,\n });\n }\n }\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Failed to initialize conversation",\n type: "modal",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n try {\n switch (type) {\n case "START_SESSION":\n await conversation.startSession();\n break;\n case "END_SESSION":\n await 
conversation.endSession();\n break;\n case "SEND_MESSAGE":\n await conversation.sendMessage(payload?.text || "");\n break;\n case "TOGGLE_MIC":\n await conversation.toggleMicrophone();\n break;\n case "TOGGLE_MUTE":\n await conversation.toggleMute();\n break;\n case "KEEP_SESSION":\n await conversation.keepSession();\n break;\n default:\n break;\n }\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Operation failed",\n type: "notification",\n endConversation: false,\n });\n }\n };\n\n const handleIncoming = (event) => {\n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== "INIT") {\n queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (error) {\n console.error("Failed to handle incoming message:", error);\n }\n };\n\n window.addEventListener("message", handleIncoming);\n document.addEventListener("message", handleIncoming);\n <\/script>\n </body>\n</html>'},[t.webClientUrl]),d=e.useCallback(function(n,e){var a;null==(a=o.current)||a.postMessage(JSON.stringify({type:n,payload:e}))},[]),u=e.useCallback(function(){r||(console.log("Initializing conversation bridge with options:",n),d("INIT",i({},n)),l(!0))},[r,n,d]),p=e.useCallback(function(){console.log("WebView loaded, initializing bridge..."),u()},[u]),S=e.useCallback(function(n){var e=null;try{e=JSON.parse(n.nativeEvent.data)}catch(n){return}if(e)switch(e.type){case"STATUS_CHANGE":null==a.onStatusChange||a.onStatusChange(e.payload);break;case"CONNECT":null==a.onConnect||a.onConnect(e.payload);break;case"DISCONNECT":null==a.onDisconnect||a.onDisconnect(e.payload);break;case"MESSAGE":null==a.onMessage||a.onMessage(e.payload);break;case"SUGGESTIONS":null==a.onSuggestions||a.onSuggestions(e.payload);break;case"SPEAKING_START":null==a.onSpeakingStart||a.onSpeakingStart();break;case"SPEAKING_END":null==a.onSpeakingEnd||a.onSpeakingEnd();break;case"TIMEOUT_WARNING":null==a.onTimeoutWarning||a.onTimeoutWarning();break;case"TIMEOUT":null==a.onTimeout||a.onTimeout();break;case"KEEP_SESSION":null==a.onKeepSession||a.onKeepSession(e.payload);break;case"MUTE_STATUS":null==a.onMuteStatusChange||a.onMuteStatusChange(e.payload);break;case"MIC_STATUS":null==a.onMicrophoneStatusChange||a.onMicrophoneStatusChange(e.payload);break;case"MIC_TRANSCRIPT":null==a.onMicrophoneSpeechRecognitionResult||a.onMicrophoneSpeechRecognitionResult(e.payload);break;case"MIC_ERROR":null==a.onMicrophoneError||a.onMicrophoneError(e.payload);break;case"ERROR":null==a.onError||a.onError(e.payload)}},[a]);return{webViewRef:o,webViewProps:{source:{html:c},onMessage:S,onLoadEnd:p,javaScriptEnabled:!0,mediaPlaybackRequiresUserAction:!1,allowsInlineMediaPlayback:!0,allowsFileAccess:!0,domStorageEnabled:!0,mediaCapturePermissionGrantType:"grant"},initialized:r,startSession:function(){return d("START_SESSION")},endSession:function(){return d("END_SESSION")},sendMessage:function(n){return d("SEND_MESSAGE",{text:n})},toggleMicrophone:function(){return d("TOGGLE_MIC")},toggleMute:function(){return d("TOGGLE_MUTE")},keepSession:function(){return d("KEEP_SESSION")}}}(n,a,t)}n.UnithConversationView=function(n){var e=n.style,t=n.webviewProps,l=n.webClientUrl,c=r(n.options,function(n,e){if(null==n)return{};var a={};for(var t in n)if({}.hasOwnProperty.call(n,t)){if(-1!==e.indexOf(t))continue;a[t]=n[t]}return a}(n,s),{webClientUrl:l});/*#__PURE__*/return o.default.createElement(a.WebView,i({ref:c.webViewRef},c.webViewProps,t,{style:e}))},n.useConversation=r});
+ !function(n,e){"object"==typeof exports&&"undefined"!=typeof module?e(exports,require("react"),require("react-native-webview")):"function"==typeof define&&define.amd?define(["exports","react","react-native-webview"],e):e((n||self).reactNative={},n.react,n.reactNativeWebview)}(this,function(n,e,a){function t(n){return n&&"object"==typeof n&&"default"in n?n:{default:n}}var o=/*#__PURE__*/t(e);function i(){return i=Object.assign?Object.assign.bind():function(n){for(var e=1;e<arguments.length;e++){var a=arguments[e];for(var t in a)({}).hasOwnProperty.call(a,t)&&(n[t]=a[t])}return n},i.apply(null,arguments)}var s=["options","style","webviewProps","webClientUrl"];function r(n,a,t){return function(n,a,t){void 0===a&&(a={}),void 0===t&&(t={});var o=e.useRef(null),s=e.useRef(!1),r=e.useState(!1),l=r[0],c=r[1],d=e.useMemo(function(){return'<!doctype html>\n<html>\n <head>\n <meta charset="utf-8" />\n <meta name="viewport" content="width=device-width, initial-scale=1" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id="root"></div>\n <script type="module">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => {\n console.log("Applying message:", msg);\n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === "INIT") {\n try {\n const { Conversation } = await import("'+(t.webClientUrl||"https://unpkg.com/@unith-ai/core-client/dist/lib.module.js")+'");\n\n if (!("VideoDecoder" in window)) {\n send("ERROR", {\n message: "WebCodecs VideoDecoder is not supported on this device.",\n type: "modal",\n endConversation: true,\n });\n return;\n }\n\n const element = document.getElementById("root");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send("STATUS_CHANGE", data),\n onConnect: (data) => send("CONNECT", data),\n onDisconnect: (data) => send("DISCONNECT", data),\n onMessage: (data) => send("MESSAGE", data),\n onSuggestions: (data) => send("SUGGESTIONS", data),\n onSpeakingStart: () => send("SPEAKING_START"),\n onSpeakingEnd: () => send("SPEAKING_END"),\n onTimeoutWarning: () => send("TIMEOUT_WARNING"),\n onTimeout: () => send("TIMEOUT"),\n onKeepSession: (data) => send("KEEP_SESSION", data),\n onMuteStatusChange: (data) => send("MUTE_STATUS", data),\n onError: (data) => send("ERROR", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send("MIC_ERROR", data),\n onMicrophoneStatusChange: (data) => send("MIC_STATUS", data),\n onMicrophoneSpeechRecognitionResult: (data) => send("MIC_TRANSCRIPT", data),\n },\n });\n\n ready = true;\n send("READY");\n\n while (queue.length > 0) {\n const next = queue.shift();\n try {\n await applyMessage(next);\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Failed to process queued message",\n type: "notification",\n endConversation: false,\n });\n }\n }\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Failed to initialize conversation",\n type: "modal",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n try {\n switch (type) {\n case "START_SESSION":\n await conversation.startSession();\n break;\n case "END_SESSION":\n await 
conversation.endSession();\n break;\n case "SEND_MESSAGE":\n await conversation.sendMessage(payload?.text || "");\n break;\n case "TOGGLE_MIC":\n await conversation.toggleMicrophone();\n break;\n case "TOGGLE_MUTE":\n await conversation.toggleMute();\n break;\n case "KEEP_SESSION":\n await conversation.keepSession();\n break;\n default:\n break;\n }\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Operation failed",\n type: "notification",\n endConversation: false,\n });\n }\n };\n\n const handleIncoming = (event) => {\n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== "INIT") {\n queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (error) {\n console.error("Failed to handle incoming message:", error);\n }\n };\n\n window.addEventListener("message", handleIncoming);\n document.addEventListener("message", handleIncoming);\n <\/script>\n </body>\n</html>'},[t.webClientUrl]),u=e.useCallback(function(n,e){var a,t={type:n,payload:e};console.log("Posting message to WebView:",n,e),null==(a=o.current)||a.postMessage(JSON.stringify(t))},[]),p=e.useCallback(function(){s.current?console.log("Already initialized, skipping..."):(console.log("WebView loaded, initializing bridge..."),console.log("Initializing conversation bridge with options:",n),s.current=!0,c(!0),setTimeout(function(){u("INIT",i({},n))},100))},[n,u]),S=e.useCallback(function(n){var e=null;try{e=JSON.parse(n.nativeEvent.data)}catch(n){return}if(e)switch(e.type){case"STATUS_CHANGE":null==a.onStatusChange||a.onStatusChange(e.payload);break;case"CONNECT":null==a.onConnect||a.onConnect(e.payload);break;case"DISCONNECT":null==a.onDisconnect||a.onDisconnect(e.payload);break;case"MESSAGE":null==a.onMessage||a.onMessage(e.payload);break;case"SUGGESTIONS":null==a.onSuggestions||a.onSuggestions(e.payload);break;case"SPEAKING_START":null==a.onSpeakingStart||a.onSpeakingStart();break;case"SPEAKING_END":null==a.onSpeakingEnd||a.onSpeakingEnd();break;case"TIMEOUT_WARNING":null==a.onTimeoutWarning||a.onTimeoutWarning();break;case"TIMEOUT":null==a.onTimeout||a.onTimeout();break;case"KEEP_SESSION":null==a.onKeepSession||a.onKeepSession(e.payload);break;case"MUTE_STATUS":null==a.onMuteStatusChange||a.onMuteStatusChange(e.payload);break;case"MIC_STATUS":null==a.onMicrophoneStatusChange||a.onMicrophoneStatusChange(e.payload);break;case"MIC_TRANSCRIPT":null==a.onMicrophoneSpeechRecognitionResult||a.onMicrophoneSpeechRecognitionResult(e.payload);break;case"MIC_ERROR":null==a.onMicrophoneError||a.onMicrophoneError(e.payload);break;case"ERROR":null==a.onError||a.onError(e.payload)}},[a]);return{webViewRef:o,webViewProps:{source:{html:d},onMessage:S,onLoadEnd:p,javaScriptEnabled:!0,mediaPlaybackRequiresUserAction:!1,allowsInlineMediaPlayback:!0,allowsFileAccess:!0,domStorageEnabled:!0,mediaCapturePermissionGrantType:"grant"},initialized:l,startSession:function(){return u("START_SESSION")},endSession:function(){return u("END_SESSION")},sendMessage:function(n){return u("SEND_MESSAGE",{text:n})},toggleMicrophone:function(){return u("TOGGLE_MIC")},toggleMute:function(){return u("TOGGLE_MUTE")},keepSession:function(){return u("KEEP_SESSION")}}}(n,a,t)}n.UnithConversationView=function(n){var e=n.style,t=n.webviewProps,l=n.webClientUrl,c=r(n.options,function(n,e){if(null==n)return{};var a={};for(var t in n)if({}.hasOwnProperty.call(n,t)){if(-1!==e.indexOf(t))continue;a[t]=n[t]}return a}(n,s),{webClientUrl:l});/*#__PURE__*/return 
o.default.createElement(a.WebView,i({ref:c.webViewRef},c.webViewProps,t,{style:e}))},n.useConversation=r});
  //# sourceMappingURL=lib.umd.js.map
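Both versions of the bundle embed the same HTML bridge and wire protocol. The sketch below reproduces the JSON envelope with message names taken verbatim from the bundle; the payload values are placeholders.

type BridgeMessage = { type: string; payload?: any };

// React Native -> page, sent with webViewRef.current.postMessage:
const commands: BridgeMessage[] = [
  { type: "INIT", payload: { orgId: "my-org", headId: "my-head", apiKey: "my-api-key" } },
  { type: "START_SESSION" },
  { type: "SEND_MESSAGE", payload: { text: "Hello" } },
  { type: "TOGGLE_MIC" },
];

// page -> React Native, sent with window.ReactNativeWebView.postMessage.
// Commands that arrive before READY are queued inside the page and replayed
// once INIT completes.
const bridgeEvents: string[] = [
  "READY", "STATUS_CHANGE", "CONNECT", "MESSAGE",
  "MIC_TRANSCRIPT", "MUTE_STATUS", "ERROR",
];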
package/dist/lib.umd.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"lib.umd.js","sources":["../src/index.tsx"],"sourcesContent":["import React, { useCallback, useMemo, useRef, useState } from \"react\";\nimport { ViewStyle } from \"react-native\";\nimport { WebView, WebViewMessageEvent } from \"react-native-webview\";\n\nexport type ConversationOptions = {\n orgId: string;\n headId: string;\n apiKey: string;\n environment?: string;\n mode?: string;\n language?: string;\n username?: string;\n allowWakeLock?: boolean;\n fadeTransitionsType?: string;\n microphoneProvider?: \"azure\" | \"eleven_labs\" | \"custom\";\n};\n\nexport type ConversationEvents = {\n onStatusChange?: (prop: { status: string }) => void;\n onConnect?: (prop: { userId: string; headInfo: any; microphoneAccess: boolean }) => void;\n onDisconnect?: (prop: any) => void;\n onMessage?: (prop: any) => void;\n onSuggestions?: (prop: { suggestions: string[] }) => void;\n onTimeoutWarning?: () => void;\n onTimeout?: () => void;\n onMuteStatusChange?: (prop: { isMuted: boolean }) => void;\n onSpeakingStart?: () => void;\n onSpeakingEnd?: () => void;\n onKeepSession?: (prop: { granted: boolean }) => void;\n onError?: (prop: { message: string; endConversation: boolean; type: string }) => void;\n onMicrophoneError?: (prop: { message: string }) => void;\n onMicrophoneStatusChange?: (prop: { status: \"ON\" | \"OFF\" | \"PROCESSING\" }) => void;\n onMicrophoneSpeechRecognitionResult?: (prop: { transcript: string }) => void;\n};\n\nexport type BridgeOptions = {\n webClientUrl?: string;\n};\n\nexport type UseConversationResult = {\n webViewRef: React.RefObject<any>;\n webViewProps: Record<string, any>;\n initialized: boolean;\n startSession: () => void;\n endSession: () => void;\n sendMessage: (text: string) => void;\n toggleMicrophone: () => void;\n toggleMute: () => void;\n keepSession: () => void;\n};\n\ntype BridgeMessage = {\n type: string;\n payload?: any;\n};\n\nconst DEFAULT_WEB_CLIENT_URL =\n \"https://unpkg.com/@unith-ai/core-client/dist/lib.module.js\";\n\nfunction buildHtml(webClientUrl: string) {\n return `<!doctype html>\n<html>\n <head>\n <meta charset=\"utf-8\" />\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id=\"root\"></div>\n <script type=\"module\">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => {\n console.log(\"Applying message from React Native:\", msg);\n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === \"INIT\") {\n try {\n const { Conversation } = await import(\"${webClientUrl}\");\n\n if (!(\"VideoDecoder\" in window)) {\n send(\"ERROR\", {\n message: \"WebCodecs VideoDecoder is not supported on this device.\",\n type: \"modal\",\n endConversation: true,\n });\n return;\n }\n\n const element = document.getElementById(\"root\");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send(\"STATUS_CHANGE\", data),\n onConnect: (data) => send(\"CONNECT\", data),\n onDisconnect: (data) => send(\"DISCONNECT\", data),\n onMessage: (data) => send(\"MESSAGE\", data),\n onSuggestions: (data) => send(\"SUGGESTIONS\", data),\n 
onSpeakingStart: () => send(\"SPEAKING_START\"),\n onSpeakingEnd: () => send(\"SPEAKING_END\"),\n onTimeoutWarning: () => send(\"TIMEOUT_WARNING\"),\n onTimeout: () => send(\"TIMEOUT\"),\n onKeepSession: (data) => send(\"KEEP_SESSION\", data),\n onMuteStatusChange: (data) => send(\"MUTE_STATUS\", data),\n onError: (data) => send(\"ERROR\", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send(\"MIC_ERROR\", data),\n onMicrophoneStatusChange: (data) => send(\"MIC_STATUS\", data),\n onMicrophoneSpeechRecognitionResult: (data) => send(\"MIC_TRANSCRIPT\", data),\n },\n });\n\n ready = true;\n send(\"READY\");\n\n while (queue.length > 0) {\n const next = queue.shift();\n try {\n await applyMessage(next);\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to process queued message\",\n type: \"notification\",\n endConversation: false,\n });\n }\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to initialize conversation\",\n type: \"modal\",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n try {\n switch (type) {\n case \"START_SESSION\":\n await conversation.startSession();\n break;\n case \"END_SESSION\":\n await conversation.endSession();\n break;\n case \"SEND_MESSAGE\":\n await conversation.sendMessage(payload?.text || \"\");\n break;\n case \"TOGGLE_MIC\":\n await conversation.toggleMicrophone();\n break;\n case \"TOGGLE_MUTE\":\n await conversation.toggleMute();\n break;\n case \"KEEP_SESSION\":\n await conversation.keepSession();\n break;\n default:\n break;\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Operation failed\",\n type: \"notification\",\n endConversation: false,\n });\n }\n };\n\n const handleIncoming = (event) => {\n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== \"INIT\") {\n queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (error) {\n console.error(\"Failed to handle incoming message:\", error);\n }\n };\n\n window.addEventListener(\"message\", handleIncoming);\n document.addEventListener(\"message\", handleIncoming);\n </script>\n </body>\n</html>`;\n}\n\nfunction useBridge(\n options: ConversationOptions,\n events: ConversationEvents = {},\n bridge: BridgeOptions = {}\n): UseConversationResult {\n const webViewRef = useRef<any>(null);\n const [initialized, setInitialized] = useState(false);\n\n const html = useMemo(\n () => buildHtml(bridge.webClientUrl || DEFAULT_WEB_CLIENT_URL),\n [bridge.webClientUrl]\n );\n\n const post = useCallback((type: string, payload?: any) => {\n const message: BridgeMessage = { type, payload };\n webViewRef.current?.postMessage(JSON.stringify(message));\n }, []);\n\n const init = useCallback(() => {\n if (initialized) return;\n console.log(\"Initializing conversation bridge with options:\", options);\n post(\"INIT\", {\n ...options,\n });\n setInitialized(true);\n }, [initialized, options, post]);\n\n const handleLoadEnd = useCallback(() => {\n console.log(\"WebView loaded, initializing bridge...\");\n init();\n }, [init]);\n\n const onMessage = useCallback(\n (event: WebViewMessageEvent) => {\n let data: BridgeMessage | null = null;\n try {\n data = JSON.parse(event.nativeEvent.data);\n } catch (_) {\n return;\n }\n if (!data) return;\n\n switch (data.type) {\n case \"STATUS_CHANGE\":\n events.onStatusChange?.(data.payload);\n break;\n case \"CONNECT\":\n events.onConnect?.(data.payload);\n break;\n case \"DISCONNECT\":\n events.onDisconnect?.(data.payload);\n 
break;\n case \"MESSAGE\":\n events.onMessage?.(data.payload);\n break;\n case \"SUGGESTIONS\":\n events.onSuggestions?.(data.payload);\n break;\n case \"SPEAKING_START\":\n events.onSpeakingStart?.();\n break;\n case \"SPEAKING_END\":\n events.onSpeakingEnd?.();\n break;\n case \"TIMEOUT_WARNING\":\n events.onTimeoutWarning?.();\n break;\n case \"TIMEOUT\":\n events.onTimeout?.();\n break;\n case \"KEEP_SESSION\":\n events.onKeepSession?.(data.payload);\n break;\n case \"MUTE_STATUS\":\n events.onMuteStatusChange?.(data.payload);\n break;\n case \"MIC_STATUS\":\n events.onMicrophoneStatusChange?.(data.payload);\n break;\n case \"MIC_TRANSCRIPT\":\n events.onMicrophoneSpeechRecognitionResult?.(data.payload);\n break;\n case \"MIC_ERROR\":\n events.onMicrophoneError?.(data.payload);\n break;\n case \"ERROR\":\n events.onError?.(data.payload);\n break;\n default:\n break;\n }\n },\n [events]\n );\n\n return {\n webViewRef,\n webViewProps: {\n source: { html },\n onMessage,\n onLoadEnd: handleLoadEnd,\n javaScriptEnabled: true,\n mediaPlaybackRequiresUserAction: false,\n allowsInlineMediaPlayback: true,\n allowsFileAccess: true,\n domStorageEnabled: true,\n mediaCapturePermissionGrantType: 'grant',\n },\n initialized,\n startSession: () => post(\"START_SESSION\"),\n endSession: () => post(\"END_SESSION\"),\n sendMessage: (text: string) => post(\"SEND_MESSAGE\", { text }),\n toggleMicrophone: () => post(\"TOGGLE_MIC\"),\n toggleMute: () => post(\"TOGGLE_MUTE\"),\n keepSession: () => post(\"KEEP_SESSION\"),\n };\n}\n\nexport function useConversation(\n options: ConversationOptions,\n events?: ConversationEvents,\n bridge?: BridgeOptions\n): UseConversationResult {\n return useBridge(options, events, bridge);\n}\n\nexport type UnithConversationViewProps = ConversationEvents & {\n options: ConversationOptions;\n style?: ViewStyle;\n webviewProps?: Record<string, any>;\n webClientUrl?: string;\n};\n\nexport function UnithConversationView({\n options,\n style,\n webviewProps,\n webClientUrl,\n ...events\n}: UnithConversationViewProps) {\n const convo = useConversation(options, events, { webClientUrl });\n\n return (\n <WebView\n ref={convo.webViewRef}\n {...convo.webViewProps}\n {...webviewProps}\n style={style}\n />\n 
);\n}"],"names":["useConversation","options","events","bridge","webViewRef","useRef","_useState","useState","initialized","setInitialized","html","useMemo","webClientUrl","post","useCallback","type","payload","_webViewRef$current","current","postMessage","JSON","stringify","init","console","log","_extends","handleLoadEnd","onMessage","event","data","parse","nativeEvent","_","onStatusChange","onConnect","onDisconnect","onSuggestions","onSpeakingStart","onSpeakingEnd","onTimeoutWarning","onTimeout","onKeepSession","onMuteStatusChange","onMicrophoneStatusChange","onMicrophoneSpeechRecognitionResult","onMicrophoneError","onError","webViewProps","source","onLoadEnd","javaScriptEnabled","mediaPlaybackRequiresUserAction","allowsInlineMediaPlayback","allowsFileAccess","domStorageEnabled","mediaCapturePermissionGrantType","startSession","endSession","sendMessage","text","toggleMicrophone","toggleMute","keepSession","useBridge","_ref","style","webviewProps","convo","_objectWithoutPropertiesLoose","_excluded","React","WebView","ref"],"mappings":"ssBAiUgB,SAAAA,EACdC,EACAC,EACAC,GAEA,OA3HF,SACEF,EACAC,EACAC,QADAD,IAAAA,IAAAA,EAA6B,CAAE,YAC/BC,IAAAA,EAAwB,IAExB,IAAMC,EAAaC,EAAAA,OAAY,MAC/BC,EAAsCC,YAAS,GAAxCC,EAAWF,EAAEG,GAAAA,EAAcH,EAAA,GAE5BI,EAAOC,EAAOA,QAClB,WAAM,89BAAUR,EAAOS,cA3JzB,8DAiC+D,wwHA0HC,EAC9D,CAACT,EAAOS,eAGJC,EAAOC,EAAWA,YAAC,SAACC,EAAcC,GAAiBC,IAAAA,EAErC,OAAlBA,EAAAb,EAAWc,UAAXD,EAAoBE,YAAYC,KAAKC,UADN,CAAEN,KAAAA,EAAMC,QAAAA,IAEzC,EAAG,IAEGM,EAAOR,EAAWA,YAAC,WACnBN,IACJe,QAAQC,IAAI,iDAAkDvB,GAC9DY,EAAK,OAAMY,KACNxB,IAELQ,GAAe,GACjB,EAAG,CAACD,EAAaP,EAASY,IAEpBa,EAAgBZ,EAAWA,YAAC,WAChCS,QAAQC,IAAI,0CACZF,GACF,EAAG,CAACA,IAEEK,EAAYb,EAAAA,YAChB,SAACc,GACC,IAAIC,EAA6B,KACjC,IACEA,EAAOT,KAAKU,MAAMF,EAAMG,YAAYF,KACtC,CAAE,MAAOG,GACP,MACF,CACA,GAAKH,EAEL,OAAQA,EAAKd,MACX,IAAK,sBACHb,EAAO+B,gBAAP/B,EAAO+B,eAAiBJ,EAAKb,SAC7B,MACF,IAAK,UACHd,MAAAA,EAAOgC,WAAPhC,EAAOgC,UAAYL,EAAKb,SACxB,MACF,IAAK,mBACHd,EAAOiC,cAAPjC,EAAOiC,aAAeN,EAAKb,SAC3B,MACF,IAAK,UACa,MAAhBd,EAAOyB,WAAPzB,EAAOyB,UAAYE,EAAKb,SACxB,MACF,IAAK,oBACHd,EAAOkC,eAAPlC,EAAOkC,cAAgBP,EAAKb,SAC5B,MACF,IAAK,iBACmB,MAAtBd,EAAOmC,iBAAPnC,EAAOmC,kBACP,MACF,IAAK,eACHnC,MAAAA,EAAOoC,eAAPpC,EAAOoC,gBACP,MACF,IAAK,kBACoB,MAAvBpC,EAAOqC,kBAAPrC,EAAOqC,mBACP,MACF,IAAK,UACHrC,MAAAA,EAAOsC,WAAPtC,EAAOsC,YACP,MACF,IAAK,qBACHtC,EAAOuC,eAAPvC,EAAOuC,cAAgBZ,EAAKb,SAC5B,MACF,IAAK,cACsB,MAAzBd,EAAOwC,oBAAPxC,EAAOwC,mBAAqBb,EAAKb,SACjC,MACF,IAAK,aACHd,MAAAA,EAAOyC,0BAAPzC,EAAOyC,yBAA2Bd,EAAKb,SACvC,MACF,IAAK,uBACHd,EAAO0C,qCAAP1C,EAAO0C,oCAAsCf,EAAKb,SAClD,MACF,IAAK,YACHd,MAAAA,EAAO2C,mBAAP3C,EAAO2C,kBAAoBhB,EAAKb,SAChC,MACF,IAAK,cACHd,EAAO4C,SAAP5C,EAAO4C,QAAUjB,EAAKb,SAK5B,EACA,CAACd,IAGH,MAAO,CACLE,WAAAA,EACA2C,aAAc,CACZC,OAAQ,CAAEtC,KAAAA,GACViB,UAAAA,EACAsB,UAAWvB,EACXwB,mBAAmB,EACnBC,iCAAiC,EACjCC,2BAA2B,EAC3BC,kBAAkB,EAClBC,mBAAmB,EACnBC,gCAAiC,SAEnC/C,YAAAA,EACAgD,aAAc,WAAM,OAAA3C,EAAK,gBAAgB,EACzC4C,WAAY,WAAF,OAAQ5C,EAAK,cAAc,EACrC6C,YAAa,SAACC,GAAiB,OAAA9C,EAAK,eAAgB,CAAE8C,KAAAA,GAAO,EAC7DC,iBAAkB,WAAM,OAAA/C,EAAK,aAAa,EAC1CgD,WAAY,WAAF,OAAQhD,EAAK,cAAc,EACrCiD,YAAa,kBAAMjD,EAAK,eAAe,EAE3C,CAOSkD,CAAU9D,EAASC,EAAQC,EACpC,yBASgB,SAAqB6D,GAMR,IAJ3BC,EAAKD,EAALC,MACAC,EAAYF,EAAZE,aACAtD,EAAYoD,EAAZpD,aAGMuD,EAAQnE,EANPgE,EAAP/D,mJAISmE,CAAAJ,EAAAK,GAEsC,CAAEzD,aAAAA,iBAEjD,OACE0D,wBAACC,UAAO9C,GACN+C,IAAKL,EAAM/D,YACP+D,EAAMpB,aACNmB,GACJD,MAAOA,IAGb"}
+ {"version":3,"file":"lib.umd.js","sources":["../src/index.tsx"],"sourcesContent":["import React, { useCallback, useMemo, useRef, useState } from \"react\";\nimport { ViewStyle } from \"react-native\";\nimport { WebView, WebViewMessageEvent } from \"react-native-webview\";\n\nexport type ConversationOptions = {\n orgId: string;\n headId: string;\n apiKey: string;\n environment?: string;\n mode?: string;\n language?: string;\n username?: string;\n allowWakeLock?: boolean;\n fadeTransitionsType?: string;\n microphoneProvider?: \"azure\" | \"eleven_labs\" | \"custom\";\n};\n\nexport type ConversationEvents = {\n onStatusChange?: (prop: { status: string }) => void;\n onConnect?: (prop: { userId: string; headInfo: any; microphoneAccess: boolean }) => void;\n onDisconnect?: (prop: any) => void;\n onMessage?: (prop: any) => void;\n onSuggestions?: (prop: { suggestions: string[] }) => void;\n onTimeoutWarning?: () => void;\n onTimeout?: () => void;\n onMuteStatusChange?: (prop: { isMuted: boolean }) => void;\n onSpeakingStart?: () => void;\n onSpeakingEnd?: () => void;\n onKeepSession?: (prop: { granted: boolean }) => void;\n onError?: (prop: { message: string; endConversation: boolean; type: string }) => void;\n onMicrophoneError?: (prop: { message: string }) => void;\n onMicrophoneStatusChange?: (prop: { status: \"ON\" | \"OFF\" | \"PROCESSING\" }) => void;\n onMicrophoneSpeechRecognitionResult?: (prop: { transcript: string }) => void;\n};\n\nexport type BridgeOptions = {\n webClientUrl?: string;\n};\n\nexport type UseConversationResult = {\n webViewRef: React.RefObject<any>;\n webViewProps: Record<string, any>;\n initialized: boolean;\n startSession: () => void;\n endSession: () => void;\n sendMessage: (text: string) => void;\n toggleMicrophone: () => void;\n toggleMute: () => void;\n keepSession: () => void;\n};\n\ntype BridgeMessage = {\n type: string;\n payload?: any;\n};\n\nconst DEFAULT_WEB_CLIENT_URL =\n \"https://unpkg.com/@unith-ai/core-client/dist/lib.module.js\";\n\nfunction buildHtml(webClientUrl: string) {\n return `<!doctype html>\n<html>\n <head>\n <meta charset=\"utf-8\" />\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id=\"root\"></div>\n <script type=\"module\">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => {\n console.log(\"Applying message:\", msg);\n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === \"INIT\") {\n try {\n const { Conversation } = await import(\"${webClientUrl}\");\n\n if (!(\"VideoDecoder\" in window)) {\n send(\"ERROR\", {\n message: \"WebCodecs VideoDecoder is not supported on this device.\",\n type: \"modal\",\n endConversation: true,\n });\n return;\n }\n\n const element = document.getElementById(\"root\");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send(\"STATUS_CHANGE\", data),\n onConnect: (data) => send(\"CONNECT\", data),\n onDisconnect: (data) => send(\"DISCONNECT\", data),\n onMessage: (data) => send(\"MESSAGE\", data),\n onSuggestions: (data) => send(\"SUGGESTIONS\", data),\n onSpeakingStart: () => 
send(\"SPEAKING_START\"),\n onSpeakingEnd: () => send(\"SPEAKING_END\"),\n onTimeoutWarning: () => send(\"TIMEOUT_WARNING\"),\n onTimeout: () => send(\"TIMEOUT\"),\n onKeepSession: (data) => send(\"KEEP_SESSION\", data),\n onMuteStatusChange: (data) => send(\"MUTE_STATUS\", data),\n onError: (data) => send(\"ERROR\", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send(\"MIC_ERROR\", data),\n onMicrophoneStatusChange: (data) => send(\"MIC_STATUS\", data),\n onMicrophoneSpeechRecognitionResult: (data) => send(\"MIC_TRANSCRIPT\", data),\n },\n });\n\n ready = true;\n send(\"READY\");\n\n while (queue.length > 0) {\n const next = queue.shift();\n try {\n await applyMessage(next);\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to process queued message\",\n type: \"notification\",\n endConversation: false,\n });\n }\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to initialize conversation\",\n type: \"modal\",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n try {\n switch (type) {\n case \"START_SESSION\":\n await conversation.startSession();\n break;\n case \"END_SESSION\":\n await conversation.endSession();\n break;\n case \"SEND_MESSAGE\":\n await conversation.sendMessage(payload?.text || \"\");\n break;\n case \"TOGGLE_MIC\":\n await conversation.toggleMicrophone();\n break;\n case \"TOGGLE_MUTE\":\n await conversation.toggleMute();\n break;\n case \"KEEP_SESSION\":\n await conversation.keepSession();\n break;\n default:\n break;\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Operation failed\",\n type: \"notification\",\n endConversation: false,\n });\n }\n };\n\n const handleIncoming = (event) => {\n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== \"INIT\") {\n queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (error) {\n console.error(\"Failed to handle incoming message:\", error);\n }\n };\n\n window.addEventListener(\"message\", handleIncoming);\n document.addEventListener(\"message\", handleIncoming);\n </script>\n </body>\n</html>`;\n}\n\nfunction useBridge(\n options: ConversationOptions,\n events: ConversationEvents = {},\n bridge: BridgeOptions = {}\n): UseConversationResult {\n const webViewRef = useRef<any>(null);\n const initializedRef = useRef(false);\n const [initialized, setInitialized] = useState(false);\n\n const html = useMemo(\n () => buildHtml(bridge.webClientUrl || DEFAULT_WEB_CLIENT_URL),\n [bridge.webClientUrl]\n );\n\n const post = useCallback((type: string, payload?: any) => {\n const message: BridgeMessage = { type, payload };\n console.log(\"Posting message to WebView:\", type, payload);\n webViewRef.current?.postMessage(JSON.stringify(message));\n }, []);\n\n const handleLoadEnd = useCallback(() => {\n if (initializedRef.current) {\n console.log(\"Already initialized, skipping...\");\n return;\n }\n\n console.log(\"WebView loaded, initializing bridge...\");\n console.log(\"Initializing conversation bridge with options:\", options);\n\n initializedRef.current = true;\n setInitialized(true);\n\n // Add a small delay to ensure WebView is ready to receive messages\n setTimeout(() => {\n post(\"INIT\", { ...options });\n }, 100);\n }, [options, post]);\n\n const onMessage = useCallback(\n (event: WebViewMessageEvent) => {\n let data: BridgeMessage | null = null;\n try {\n data = JSON.parse(event.nativeEvent.data);\n } catch (_) {\n return;\n }\n if (!data) return;\n\n switch 
(data.type) {\n case \"STATUS_CHANGE\":\n events.onStatusChange?.(data.payload);\n break;\n case \"CONNECT\":\n events.onConnect?.(data.payload);\n break;\n case \"DISCONNECT\":\n events.onDisconnect?.(data.payload);\n break;\n case \"MESSAGE\":\n events.onMessage?.(data.payload);\n break;\n case \"SUGGESTIONS\":\n events.onSuggestions?.(data.payload);\n break;\n case \"SPEAKING_START\":\n events.onSpeakingStart?.();\n break;\n case \"SPEAKING_END\":\n events.onSpeakingEnd?.();\n break;\n case \"TIMEOUT_WARNING\":\n events.onTimeoutWarning?.();\n break;\n case \"TIMEOUT\":\n events.onTimeout?.();\n break;\n case \"KEEP_SESSION\":\n events.onKeepSession?.(data.payload);\n break;\n case \"MUTE_STATUS\":\n events.onMuteStatusChange?.(data.payload);\n break;\n case \"MIC_STATUS\":\n events.onMicrophoneStatusChange?.(data.payload);\n break;\n case \"MIC_TRANSCRIPT\":\n events.onMicrophoneSpeechRecognitionResult?.(data.payload);\n break;\n case \"MIC_ERROR\":\n events.onMicrophoneError?.(data.payload);\n break;\n case \"ERROR\":\n events.onError?.(data.payload);\n break;\n default:\n break;\n }\n },\n [events]\n );\n\n return {\n webViewRef,\n webViewProps: {\n source: { html },\n onMessage,\n onLoadEnd: handleLoadEnd,\n javaScriptEnabled: true,\n mediaPlaybackRequiresUserAction: false,\n allowsInlineMediaPlayback: true,\n allowsFileAccess: true,\n domStorageEnabled: true,\n mediaCapturePermissionGrantType: 'grant',\n },\n initialized,\n startSession: () => post(\"START_SESSION\"),\n endSession: () => post(\"END_SESSION\"),\n sendMessage: (text: string) => post(\"SEND_MESSAGE\", { text }),\n toggleMicrophone: () => post(\"TOGGLE_MIC\"),\n toggleMute: () => post(\"TOGGLE_MUTE\"),\n keepSession: () => post(\"KEEP_SESSION\"),\n };\n}\n\nexport function useConversation(\n options: ConversationOptions,\n events?: ConversationEvents,\n bridge?: BridgeOptions\n): UseConversationResult {\n return useBridge(options, events, bridge);\n}\n\nexport type UnithConversationViewProps = ConversationEvents & {\n options: ConversationOptions;\n style?: ViewStyle;\n webviewProps?: Record<string, any>;\n webClientUrl?: string;\n};\n\nexport function UnithConversationView({\n options,\n style,\n webviewProps,\n webClientUrl,\n ...events\n}: UnithConversationViewProps) {\n const convo = useConversation(options, events, { webClientUrl });\n\n return (\n <WebView\n ref={convo.webViewRef}\n {...convo.webViewProps}\n {...webviewProps}\n style={style}\n />\n 
);\n}"],"names":["useConversation","options","events","bridge","webViewRef","useRef","initializedRef","_useState","useState","initialized","setInitialized","html","useMemo","webClientUrl","post","useCallback","type","payload","_webViewRef$current","message","console","log","current","postMessage","JSON","stringify","handleLoadEnd","setTimeout","_extends","onMessage","event","data","parse","nativeEvent","_","onStatusChange","onConnect","onDisconnect","onSuggestions","onSpeakingStart","onSpeakingEnd","onTimeoutWarning","onTimeout","onKeepSession","onMuteStatusChange","onMicrophoneStatusChange","onMicrophoneSpeechRecognitionResult","onMicrophoneError","onError","webViewProps","source","onLoadEnd","javaScriptEnabled","mediaPlaybackRequiresUserAction","allowsInlineMediaPlayback","allowsFileAccess","domStorageEnabled","mediaCapturePermissionGrantType","startSession","endSession","sendMessage","text","toggleMicrophone","toggleMute","keepSession","useBridge","_ref","style","webviewProps","convo","_objectWithoutPropertiesLoose","_excluded","React","WebView","ref"],"mappings":"ssBAuUgB,SAAAA,EACdC,EACAC,EACAC,GAEA,OAjIF,SACEF,EACAC,EACAC,YADAD,IAAAA,EAA6B,CAAA,YAC7BC,IAAAA,EAAwB,CAAA,GAExB,IAAMC,EAAaC,EAAAA,OAAY,MACzBC,EAAiBD,EAAAA,QAAO,GAC9BE,EAAsCC,EAAAA,UAAS,GAAxCC,EAAWF,EAAEG,GAAAA,EAAcH,EAAA,GAE5BI,EAAOC,EAAOA,QAClB,WAAA,MAzJF,s8BAyJkBT,EAAOU,cA5JzB,8DAgJF,wwHAYkE,EAC9D,CAACV,EAAOU,eAGJC,EAAOC,cAAY,SAACC,EAAcC,OAAiBC,EACjDC,EAAyB,CAAEH,KAAAA,EAAMC,QAAAA,GACvCG,QAAQC,IAAI,8BAA+BL,EAAMC,UACjDC,EAAAd,EAAWkB,UAAXJ,EAAoBK,YAAYC,KAAKC,UAAUN,GACjD,EAAG,IAEGO,EAAgBX,EAAWA,YAAC,WAC5BT,EAAegB,QACjBF,QAAQC,IAAI,qCAIdD,QAAQC,IAAI,0CACZD,QAAQC,IAAI,iDAAkDpB,GAE9DK,EAAegB,SAAU,EACzBZ,GAAe,GAGfiB,WAAW,WACTb,EAAK,OAAMc,KAAO3B,GACpB,EAAG,KACL,EAAG,CAACA,EAASa,IAEPe,EAAYd,EAAWA,YAC3B,SAACe,GACC,IAAIC,EAA6B,KACjC,IACEA,EAAOP,KAAKQ,MAAMF,EAAMG,YAAYF,KACtC,CAAE,MAAOG,GACP,MACF,CACA,GAAKH,EAEL,OAAQA,EAAKf,MACX,IAAK,sBACHd,EAAOiC,gBAAPjC,EAAOiC,eAAiBJ,EAAKd,SAC7B,MACF,IAAK,UACHf,MAAAA,EAAOkC,WAAPlC,EAAOkC,UAAYL,EAAKd,SACxB,MACF,IAAK,mBACHf,EAAOmC,cAAPnC,EAAOmC,aAAeN,EAAKd,SAC3B,MACF,IAAK,UACHf,MAAAA,EAAO2B,WAAP3B,EAAO2B,UAAYE,EAAKd,SACxB,MACF,IAAK,cACiB,MAApBf,EAAOoC,eAAPpC,EAAOoC,cAAgBP,EAAKd,SAC5B,MACF,IAAK,uBACHf,EAAOqC,iBAAPrC,EAAOqC,kBACP,MACF,IAAK,eACHrC,MAAAA,EAAOsC,eAAPtC,EAAOsC,gBACP,MACF,IAAK,kBACHtC,MAAAA,EAAOuC,kBAAPvC,EAAOuC,mBACP,MACF,IAAK,UACHvC,MAAAA,EAAOwC,WAAPxC,EAAOwC,YACP,MACF,IAAK,eACiB,MAApBxC,EAAOyC,eAAPzC,EAAOyC,cAAgBZ,EAAKd,SAC5B,MACF,IAAK,cACHf,MAAAA,EAAO0C,oBAAP1C,EAAO0C,mBAAqBb,EAAKd,SACjC,MACF,IAAK,aAC4B,MAA/Bf,EAAO2C,0BAAP3C,EAAO2C,yBAA2Bd,EAAKd,SACvC,MACF,IAAK,iBACuC,MAA1Cf,EAAO4C,qCAAP5C,EAAO4C,oCAAsCf,EAAKd,SAClD,MACF,IAAK,YACHf,MAAAA,EAAO6C,mBAAP7C,EAAO6C,kBAAoBhB,EAAKd,SAChC,MACF,IAAK,QACW,MAAdf,EAAO8C,SAAP9C,EAAO8C,QAAUjB,EAAKd,SAK5B,EACA,CAACf,IAGH,MAAO,CACLE,WAAAA,EACA6C,aAAc,CACZC,OAAQ,CAAEvC,KAAAA,GACVkB,UAAAA,EACAsB,UAAWzB,EACX0B,mBAAmB,EACnBC,iCAAiC,EACjCC,2BAA2B,EAC3BC,kBAAkB,EAClBC,mBAAmB,EACnBC,gCAAiC,SAEnChD,YAAAA,EACAiD,aAAc,WAAM,OAAA5C,EAAK,gBAAgB,EACzC6C,WAAY,WAAM,OAAA7C,EAAK,cAAc,EACrC8C,YAAa,SAACC,UAAiB/C,EAAK,eAAgB,CAAE+C,KAAAA,GAAO,EAC7DC,iBAAkB,kBAAMhD,EAAK,aAAa,EAC1CiD,WAAY,WAAF,OAAQjD,EAAK,cAAc,EACrCkD,YAAa,WAAF,OAAQlD,EAAK,eAAe,EAE3C,CAOSmD,CAAUhE,EAASC,EAAQC,EACpC,yBASgB,SAAqB+D,GACnC,IACAC,EAAKD,EAALC,MACAC,EAAYF,EAAZE,aACAvD,EAAYqD,EAAZrD,aAGMwD,EAAQrE,EANPkE,EAAPjE,mJAISqE,CAAAJ,EAAAK,GAEsC,CAAE1D,aAAAA,iBAEjD,OACE2D,EAAAA,sBAACC,EAAOA,QAAA7C,EACN8C,CAAAA,IAAKL,EAAMjE,YACPiE,EAAMpB,aACNmB,GACJD,MAAOA,IAGb"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@unith-ai/react-native",
- "version": "0.0.6",
+ "version": "0.0.7",
  "description": "React Native WebView wrapper for Unith AI digital humans",
  "main": "./dist/lib.js",
  "module": "./dist/lib.module.js",