@unith-ai/react-native 0.0.1 → 0.0.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
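At a glance, the 0.0.1 → 0.0.3 delta is confined to the dist/ build outputs below and, as read from the minified code and the embedded sourcemaps, covers three things: error handling added inside the injected WebView bridge script (queued-message replay and session operations now surface failures instead of failing silently), two diagnostic console.log calls in the React Native hook, and relaxed WebView media defaults. The last is the most consequential for consumers; a readable sketch of the new defaults, reconstructed from the src/index.tsx embedded in lib.js.map (not the shipped source layout):

    // webViewProps returned by useConversation in 0.0.3
    // (field names taken verbatim from the embedded source)
    webViewProps: {
      source: { html },
      onMessage,
      onLoadEnd: () => {
        console.log("WebView loaded, initializing bridge...");
        init();
      },
      javaScriptEnabled: true,
      mediaPlaybackRequiresUserAction: false,    // was true in 0.0.1: media may now autoplay
      allowsInlineMediaPlayback: true,
      allowsFileAccess: true,                    // new in 0.0.3
      domStorageEnabled: true,                   // new in 0.0.3
      mediaCapturePermissionGrantType: "grant",  // new in 0.0.3: auto-grants WebView media-capture requests
    },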
package/dist/index.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.tsx"],"names":[],"mappings":"AAAA,OAAO,KAAiD,MAAM,OAAO,CAAC;AACtE,OAAO,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAGzC,MAAM,MAAM,mBAAmB,GAAG;IAChC,KAAK,EAAE,MAAM,CAAC;IACd,MAAM,EAAE,MAAM,CAAC;IACf,MAAM,EAAE,MAAM,CAAC;IACf,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,aAAa,CAAC,EAAE,OAAO,CAAC;IACxB,mBAAmB,CAAC,EAAE,MAAM,CAAC;IAC7B,kBAAkB,CAAC,EAAE,OAAO,GAAG,aAAa,GAAG,QAAQ,CAAC;CACzD,CAAC;AAEF,MAAM,MAAM,kBAAkB,GAAG;IAC/B,cAAc,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,MAAM,EAAE,MAAM,CAAA;KAAE,KAAK,IAAI,CAAC;IACpD,SAAS,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,MAAM,EAAE,MAAM,CAAC;QAAC,QAAQ,EAAE,GAAG,CAAC;QAAC,gBAAgB,EAAE,OAAO,CAAA;KAAE,KAAK,IAAI,CAAC;IACzF,YAAY,CAAC,EAAE,CAAC,IAAI,EAAE,GAAG,KAAK,IAAI,CAAC;IACnC,SAAS,CAAC,EAAE,CAAC,IAAI,EAAE,GAAG,KAAK,IAAI,CAAC;IAChC,aAAa,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,WAAW,EAAE,MAAM,EAAE,CAAA;KAAE,KAAK,IAAI,CAAC;IAC1D,gBAAgB,CAAC,EAAE,MAAM,IAAI,CAAC;IAC9B,SAAS,CAAC,EAAE,MAAM,IAAI,CAAC;IACvB,kBAAkB,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,OAAO,EAAE,OAAO,CAAA;KAAE,KAAK,IAAI,CAAC;IAC1D,eAAe,CAAC,EAAE,MAAM,IAAI,CAAC;IAC7B,aAAa,CAAC,EAAE,MAAM,IAAI,CAAC;IAC3B,aAAa,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,OAAO,EAAE,OAAO,CAAA;KAAE,KAAK,IAAI,CAAC;IACrD,OAAO,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,OAAO,EAAE,MAAM,CAAC;QAAC,eAAe,EAAE,OAAO,CAAC;QAAC,IAAI,EAAE,MAAM,CAAA;KAAE,KAAK,IAAI,CAAC;IACtF,iBAAiB,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,OAAO,EAAE,MAAM,CAAA;KAAE,KAAK,IAAI,CAAC;IACxD,wBAAwB,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,MAAM,EAAE,IAAI,GAAG,KAAK,GAAG,YAAY,CAAA;KAAE,KAAK,IAAI,CAAC;IACnF,mCAAmC,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,UAAU,EAAE,MAAM,CAAA;KAAE,KAAK,IAAI,CAAC;CAC9E,CAAC;AAEF,MAAM,MAAM,aAAa,GAAG;IAC1B,YAAY,CAAC,EAAE,MAAM,CAAC;CACvB,CAAC;AAEF,MAAM,MAAM,qBAAqB,GAAG;IAClC,UAAU,EAAE,KAAK,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC;IACjC,YAAY,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;IAClC,WAAW,EAAE,OAAO,CAAC;IACrB,YAAY,EAAE,MAAM,IAAI,CAAC;IACzB,UAAU,EAAE,MAAM,IAAI,CAAC;IACvB,WAAW,EAAE,CAAC,IAAI,EAAE,MAAM,KAAK,IAAI,CAAC;IACpC,gBAAgB,EAAE,MAAM,IAAI,CAAC;IAC7B,UAAU,EAAE,MAAM,IAAI,CAAC;IACvB,WAAW,EAAE,MAAM,IAAI,CAAC;CACzB,CAAC;AAoPF,wBAAgB,eAAe,CAC7B,OAAO,EAAE,mBAAmB,EAC5B,MAAM,CAAC,EAAE,kBAAkB,EAC3B,MAAM,CAAC,EAAE,aAAa,GACrB,qBAAqB,CAEvB;AAED,MAAM,MAAM,0BAA0B,GAAG,kBAAkB,GAAG;IAC5D,OAAO,EAAE,mBAAmB,CAAC;IAC7B,KAAK,CAAC,EAAE,SAAS,CAAC;IAClB,YAAY,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;IACnC,YAAY,CAAC,EAAE,MAAM,CAAC;CACvB,CAAC;AAEF,wBAAgB,qBAAqB,CAAC,EACpC,OAAO,EACP,KAAK,EACL,YAAY,EACZ,YAAY,EACZ,GAAG,MAAM,EACV,EAAE,0BAA0B,qBAW5B"}
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.tsx"],"names":[],"mappings":"AAAA,OAAO,KAAiD,MAAM,OAAO,CAAC;AACtE,OAAO,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAGzC,MAAM,MAAM,mBAAmB,GAAG;IAChC,KAAK,EAAE,MAAM,CAAC;IACd,MAAM,EAAE,MAAM,CAAC;IACf,MAAM,EAAE,MAAM,CAAC;IACf,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,aAAa,CAAC,EAAE,OAAO,CAAC;IACxB,mBAAmB,CAAC,EAAE,MAAM,CAAC;IAC7B,kBAAkB,CAAC,EAAE,OAAO,GAAG,aAAa,GAAG,QAAQ,CAAC;CACzD,CAAC;AAEF,MAAM,MAAM,kBAAkB,GAAG;IAC/B,cAAc,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,MAAM,EAAE,MAAM,CAAA;KAAE,KAAK,IAAI,CAAC;IACpD,SAAS,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,MAAM,EAAE,MAAM,CAAC;QAAC,QAAQ,EAAE,GAAG,CAAC;QAAC,gBAAgB,EAAE,OAAO,CAAA;KAAE,KAAK,IAAI,CAAC;IACzF,YAAY,CAAC,EAAE,CAAC,IAAI,EAAE,GAAG,KAAK,IAAI,CAAC;IACnC,SAAS,CAAC,EAAE,CAAC,IAAI,EAAE,GAAG,KAAK,IAAI,CAAC;IAChC,aAAa,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,WAAW,EAAE,MAAM,EAAE,CAAA;KAAE,KAAK,IAAI,CAAC;IAC1D,gBAAgB,CAAC,EAAE,MAAM,IAAI,CAAC;IAC9B,SAAS,CAAC,EAAE,MAAM,IAAI,CAAC;IACvB,kBAAkB,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,OAAO,EAAE,OAAO,CAAA;KAAE,KAAK,IAAI,CAAC;IAC1D,eAAe,CAAC,EAAE,MAAM,IAAI,CAAC;IAC7B,aAAa,CAAC,EAAE,MAAM,IAAI,CAAC;IAC3B,aAAa,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,OAAO,EAAE,OAAO,CAAA;KAAE,KAAK,IAAI,CAAC;IACrD,OAAO,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,OAAO,EAAE,MAAM,CAAC;QAAC,eAAe,EAAE,OAAO,CAAC;QAAC,IAAI,EAAE,MAAM,CAAA;KAAE,KAAK,IAAI,CAAC;IACtF,iBAAiB,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,OAAO,EAAE,MAAM,CAAA;KAAE,KAAK,IAAI,CAAC;IACxD,wBAAwB,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,MAAM,EAAE,IAAI,GAAG,KAAK,GAAG,YAAY,CAAA;KAAE,KAAK,IAAI,CAAC;IACnF,mCAAmC,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,UAAU,EAAE,MAAM,CAAA;KAAE,KAAK,IAAI,CAAC;CAC9E,CAAC;AAEF,MAAM,MAAM,aAAa,GAAG;IAC1B,YAAY,CAAC,EAAE,MAAM,CAAC;CACvB,CAAC;AAEF,MAAM,MAAM,qBAAqB,GAAG;IAClC,UAAU,EAAE,KAAK,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC;IACjC,YAAY,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;IAClC,WAAW,EAAE,OAAO,CAAC;IACrB,YAAY,EAAE,MAAM,IAAI,CAAC;IACzB,UAAU,EAAE,MAAM,IAAI,CAAC;IACvB,WAAW,EAAE,CAAC,IAAI,EAAE,MAAM,KAAK,IAAI,CAAC;IACpC,gBAAgB,EAAE,MAAM,IAAI,CAAC;IAC7B,UAAU,EAAE,MAAM,IAAI,CAAC;IACvB,WAAW,EAAE,MAAM,IAAI,CAAC;CACzB,CAAC;AA6QF,wBAAgB,eAAe,CAC7B,OAAO,EAAE,mBAAmB,EAC5B,MAAM,CAAC,EAAE,kBAAkB,EAC3B,MAAM,CAAC,EAAE,aAAa,GACrB,qBAAqB,CAEvB;AAED,MAAM,MAAM,0BAA0B,GAAG,kBAAkB,GAAG;IAC5D,OAAO,EAAE,mBAAmB,CAAC;IAC7B,KAAK,CAAC,EAAE,SAAS,CAAC;IAClB,YAAY,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;IACnC,YAAY,CAAC,EAAE,MAAM,CAAC;CACvB,CAAC;AAEF,wBAAgB,qBAAqB,CAAC,EACpC,OAAO,EACP,KAAK,EACL,YAAY,EACZ,YAAY,EACZ,GAAG,MAAM,EACV,EAAE,0BAA0B,qBAW5B"}
package/dist/lib.js CHANGED
@@ -1,2 +1,2 @@
- var n=require("react"),e=require("react-native-webview");function a(n){return n&&"object"==typeof n&&"default"in n?n:{default:n}}var t=/*#__PURE__*/a(n);function o(){return o=Object.assign?Object.assign.bind():function(n){for(var e=1;e<arguments.length;e++){var a=arguments[e];for(var t in a)({}).hasOwnProperty.call(a,t)&&(n[t]=a[t])}return n},o.apply(null,arguments)}var i=["options","style","webviewProps","webClientUrl"];function s(e,a,t){return function(e,a,t){void 0===a&&(a={}),void 0===t&&(t={});var i=n.useRef(null),s=n.useState(!1),r=s[0],l=s[1],d=n.useMemo(function(){return'<!doctype html>\n<html>\n <head>\n <meta charset="utf-8" />\n <meta name="viewport" content="width=device-width, initial-scale=1" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id="root"></div>\n <script type="module">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => {\n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === "INIT") {\n try {\n const { Conversation } = await import("'+(t.webClientUrl||"https://unpkg.com/@unith-ai/core-client/dist/lib.module.js")+'");\n\n if (!("VideoDecoder" in window)) {\n send("ERROR", {\n message: "WebCodecs VideoDecoder is not supported on this device.",\n type: "modal",\n endConversation: true,\n });\n return;\n }\n\n const element = document.getElementById("root");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send("STATUS_CHANGE", data),\n onConnect: (data) => send("CONNECT", data),\n onDisconnect: (data) => send("DISCONNECT", data),\n onMessage: (data) => send("MESSAGE", data),\n onSuggestions: (data) => send("SUGGESTIONS", data),\n onSpeakingStart: () => send("SPEAKING_START"),\n onSpeakingEnd: () => send("SPEAKING_END"),\n onTimeoutWarning: () => send("TIMEOUT_WARNING"),\n onTimeout: () => send("TIMEOUT"),\n onKeepSession: (data) => send("KEEP_SESSION", data),\n onMuteStatusChange: (data) => send("MUTE_STATUS", data),\n onError: (data) => send("ERROR", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send("MIC_ERROR", data),\n onMicrophoneStatusChange: (data) => send("MIC_STATUS", data),\n onMicrophoneSpeechRecognitionResult: (data) => send("MIC_TRANSCRIPT", data),\n },\n });\n\n ready = true;\n send("READY");\n\n while (queue.length > 0) {\n const next = queue.shift();\n await applyMessage(next);\n }\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Failed to initialize conversation",\n type: "modal",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n switch (type) {\n case "START_SESSION":\n await conversation.startSession();\n break;\n case "END_SESSION":\n await conversation.endSession();\n break;\n case "SEND_MESSAGE":\n await conversation.sendMessage(payload?.text || "");\n break;\n case "TOGGLE_MIC":\n await conversation.toggleMicrophone();\n break;\n case "TOGGLE_MUTE":\n await conversation.toggleMute();\n break;\n case "KEEP_SESSION":\n await conversation.keepSession();\n break;\n default:\n break;\n }\n };\n\n const handleIncoming = (event) => {\n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== "INIT") {\n 
queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (_) {}\n };\n\n window.addEventListener("message", handleIncoming);\n document.addEventListener("message", handleIncoming);\n <\/script>\n </body>\n</html>'},[t.webClientUrl]),c=n.useCallback(function(n,e){var a;null==(a=i.current)||a.postMessage(JSON.stringify({type:n,payload:e}))},[]),u=n.useCallback(function(){r||(c("INIT",o({},e)),l(!0))},[r,e,c]),S=n.useCallback(function(n){var e=null;try{e=JSON.parse(n.nativeEvent.data)}catch(n){return}if(e)switch(e.type){case"STATUS_CHANGE":null==a.onStatusChange||a.onStatusChange(e.payload);break;case"CONNECT":null==a.onConnect||a.onConnect(e.payload);break;case"DISCONNECT":null==a.onDisconnect||a.onDisconnect(e.payload);break;case"MESSAGE":null==a.onMessage||a.onMessage(e.payload);break;case"SUGGESTIONS":null==a.onSuggestions||a.onSuggestions(e.payload);break;case"SPEAKING_START":null==a.onSpeakingStart||a.onSpeakingStart();break;case"SPEAKING_END":null==a.onSpeakingEnd||a.onSpeakingEnd();break;case"TIMEOUT_WARNING":null==a.onTimeoutWarning||a.onTimeoutWarning();break;case"TIMEOUT":null==a.onTimeout||a.onTimeout();break;case"KEEP_SESSION":null==a.onKeepSession||a.onKeepSession(e.payload);break;case"MUTE_STATUS":null==a.onMuteStatusChange||a.onMuteStatusChange(e.payload);break;case"MIC_STATUS":null==a.onMicrophoneStatusChange||a.onMicrophoneStatusChange(e.payload);break;case"MIC_TRANSCRIPT":null==a.onMicrophoneSpeechRecognitionResult||a.onMicrophoneSpeechRecognitionResult(e.payload);break;case"MIC_ERROR":null==a.onMicrophoneError||a.onMicrophoneError(e.payload);break;case"ERROR":null==a.onError||a.onError(e.payload)}},[a]);return{webViewRef:i,webViewProps:{source:{html:d},onMessage:S,onLoadEnd:u,javaScriptEnabled:!0,mediaPlaybackRequiresUserAction:!0,allowsInlineMediaPlayback:!0},initialized:r,startSession:function(){return c("START_SESSION")},endSession:function(){return c("END_SESSION")},sendMessage:function(n){return c("SEND_MESSAGE",{text:n})},toggleMicrophone:function(){return c("TOGGLE_MIC")},toggleMute:function(){return c("TOGGLE_MUTE")},keepSession:function(){return c("KEEP_SESSION")}}}(e,a,t)}exports.UnithConversationView=function(n){var a=n.style,r=n.webviewProps,l=n.webClientUrl,d=s(n.options,function(n,e){if(null==n)return{};var a={};for(var t in n)if({}.hasOwnProperty.call(n,t)){if(-1!==e.indexOf(t))continue;a[t]=n[t]}return a}(n,i),{webClientUrl:l});/*#__PURE__*/return t.default.createElement(e.WebView,o({ref:d.webViewRef},d.webViewProps,r,{style:a}))},exports.useConversation=s;
+ var n=require("react"),e=require("react-native-webview");function a(n){return n&&"object"==typeof n&&"default"in n?n:{default:n}}var t=/*#__PURE__*/a(n);function o(){return o=Object.assign?Object.assign.bind():function(n){for(var e=1;e<arguments.length;e++){var a=arguments[e];for(var t in a)({}).hasOwnProperty.call(a,t)&&(n[t]=a[t])}return n},o.apply(null,arguments)}var i=["options","style","webviewProps","webClientUrl"];function r(e,a,t){return function(e,a,t){void 0===a&&(a={}),void 0===t&&(t={});var i=n.useRef(null),r=n.useState(!1),s=r[0],l=r[1],d=n.useMemo(function(){return'<!doctype html>\n<html>\n <head>\n <meta charset="utf-8" />\n <meta name="viewport" content="width=device-width, initial-scale=1" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id="root"></div>\n <script type="module">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => {\n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === "INIT") {\n try {\n const { Conversation } = await import("'+(t.webClientUrl||"https://unpkg.com/@unith-ai/core-client/dist/lib.module.js")+'");\n\n if (!("VideoDecoder" in window)) {\n send("ERROR", {\n message: "WebCodecs VideoDecoder is not supported on this device.",\n type: "modal",\n endConversation: true,\n });\n return;\n }\n\n const element = document.getElementById("root");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send("STATUS_CHANGE", data),\n onConnect: (data) => send("CONNECT", data),\n onDisconnect: (data) => send("DISCONNECT", data),\n onMessage: (data) => send("MESSAGE", data),\n onSuggestions: (data) => send("SUGGESTIONS", data),\n onSpeakingStart: () => send("SPEAKING_START"),\n onSpeakingEnd: () => send("SPEAKING_END"),\n onTimeoutWarning: () => send("TIMEOUT_WARNING"),\n onTimeout: () => send("TIMEOUT"),\n onKeepSession: (data) => send("KEEP_SESSION", data),\n onMuteStatusChange: (data) => send("MUTE_STATUS", data),\n onError: (data) => send("ERROR", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send("MIC_ERROR", data),\n onMicrophoneStatusChange: (data) => send("MIC_STATUS", data),\n onMicrophoneSpeechRecognitionResult: (data) => send("MIC_TRANSCRIPT", data),\n },\n });\n\n ready = true;\n send("READY");\n\n while (queue.length > 0) {\n const next = queue.shift();\n try {\n await applyMessage(next);\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Failed to process queued message",\n type: "notification",\n endConversation: false,\n });\n }\n }\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Failed to initialize conversation",\n type: "modal",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n try {\n switch (type) {\n case "START_SESSION":\n await conversation.startSession();\n break;\n case "END_SESSION":\n await conversation.endSession();\n break;\n case "SEND_MESSAGE":\n await conversation.sendMessage(payload?.text || "");\n break;\n case "TOGGLE_MIC":\n await conversation.toggleMicrophone();\n break;\n case "TOGGLE_MUTE":\n await conversation.toggleMute();\n break;\n case "KEEP_SESSION":\n await 
conversation.keepSession();\n break;\n default:\n break;\n }\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Operation failed",\n type: "notification",\n endConversation: false,\n });\n }\n };\n\n const handleIncoming = (event) => {\n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== "INIT") {\n queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (error) {\n console.error("Failed to handle incoming message:", error);\n }\n };\n\n window.addEventListener("message", handleIncoming);\n document.addEventListener("message", handleIncoming);\n <\/script>\n </body>\n</html>'},[t.webClientUrl]),c=n.useCallback(function(n,e){var a;null==(a=i.current)||a.postMessage(JSON.stringify({type:n,payload:e}))},[]),u=n.useCallback(function(){s||(console.log("Initializing conversation bridge with options:",e),c("INIT",o({},e)),l(!0))},[s,e,c]),S=n.useCallback(function(n){var e=null;try{e=JSON.parse(n.nativeEvent.data)}catch(n){return}if(e)switch(e.type){case"STATUS_CHANGE":null==a.onStatusChange||a.onStatusChange(e.payload);break;case"CONNECT":null==a.onConnect||a.onConnect(e.payload);break;case"DISCONNECT":null==a.onDisconnect||a.onDisconnect(e.payload);break;case"MESSAGE":null==a.onMessage||a.onMessage(e.payload);break;case"SUGGESTIONS":null==a.onSuggestions||a.onSuggestions(e.payload);break;case"SPEAKING_START":null==a.onSpeakingStart||a.onSpeakingStart();break;case"SPEAKING_END":null==a.onSpeakingEnd||a.onSpeakingEnd();break;case"TIMEOUT_WARNING":null==a.onTimeoutWarning||a.onTimeoutWarning();break;case"TIMEOUT":null==a.onTimeout||a.onTimeout();break;case"KEEP_SESSION":null==a.onKeepSession||a.onKeepSession(e.payload);break;case"MUTE_STATUS":null==a.onMuteStatusChange||a.onMuteStatusChange(e.payload);break;case"MIC_STATUS":null==a.onMicrophoneStatusChange||a.onMicrophoneStatusChange(e.payload);break;case"MIC_TRANSCRIPT":null==a.onMicrophoneSpeechRecognitionResult||a.onMicrophoneSpeechRecognitionResult(e.payload);break;case"MIC_ERROR":null==a.onMicrophoneError||a.onMicrophoneError(e.payload);break;case"ERROR":null==a.onError||a.onError(e.payload)}},[a]);return{webViewRef:i,webViewProps:{source:{html:d},onMessage:S,onLoadEnd:function(){console.log("WebView loaded, initializing bridge..."),u()},javaScriptEnabled:!0,mediaPlaybackRequiresUserAction:!1,allowsInlineMediaPlayback:!0,allowsFileAccess:!0,domStorageEnabled:!0,mediaCapturePermissionGrantType:"grant"},initialized:s,startSession:function(){return c("START_SESSION")},endSession:function(){return c("END_SESSION")},sendMessage:function(n){return c("SEND_MESSAGE",{text:n})},toggleMicrophone:function(){return c("TOGGLE_MIC")},toggleMute:function(){return c("TOGGLE_MUTE")},keepSession:function(){return c("KEEP_SESSION")}}}(e,a,t)}exports.UnithConversationView=function(n){var a=n.style,s=n.webviewProps,l=n.webClientUrl,d=r(n.options,function(n,e){if(null==n)return{};var a={};for(var t in n)if({}.hasOwnProperty.call(n,t)){if(-1!==e.indexOf(t))continue;a[t]=n[t]}return a}(n,i),{webClientUrl:l});/*#__PURE__*/return t.default.createElement(e.WebView,o({ref:d.webViewRef},d.webViewProps,s,{style:a}))},exports.useConversation=r;
  //# sourceMappingURL=lib.js.map
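The substantive change inside the bridge script that lib.js injects is defensive error handling; the previously silent JSON-parse failure in handleIncoming is also now logged via console.error. Lifted from the embedded source, the two added paths look like this:

    // 0.0.3: replaying messages queued before INIT completed no longer aborts the loop
    while (queue.length > 0) {
      const next = queue.shift();
      try {
        await applyMessage(next);
      } catch (error) {
        send("ERROR", {
          message: error?.message || "Failed to process queued message",
          type: "notification",
          endConversation: false,
        });
      }
    }

    // 0.0.3: session operations report failures as non-fatal notifications
    try {
      switch (type) { /* START_SESSION, END_SESSION, SEND_MESSAGE, ... */ }
    } catch (error) {
      send("ERROR", {
        message: error?.message || "Operation failed",
        type: "notification",
        endConversation: false,
      });
    }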
package/dist/lib.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"lib.js","sources":["../src/index.tsx"],"sourcesContent":["import React, { useCallback, useMemo, useRef, useState } from \"react\";\nimport { ViewStyle } from \"react-native\";\nimport { WebView, WebViewMessageEvent } from \"react-native-webview\";\n\nexport type ConversationOptions = {\n orgId: string;\n headId: string;\n apiKey: string;\n environment?: string; \n mode?: string;\n language?: string;\n username?: string;\n allowWakeLock?: boolean;\n fadeTransitionsType?: string;\n microphoneProvider?: \"azure\" | \"eleven_labs\" | \"custom\";\n};\n\nexport type ConversationEvents = {\n onStatusChange?: (prop: { status: string }) => void;\n onConnect?: (prop: { userId: string; headInfo: any; microphoneAccess: boolean }) => void;\n onDisconnect?: (prop: any) => void;\n onMessage?: (prop: any) => void;\n onSuggestions?: (prop: { suggestions: string[] }) => void;\n onTimeoutWarning?: () => void;\n onTimeout?: () => void;\n onMuteStatusChange?: (prop: { isMuted: boolean }) => void;\n onSpeakingStart?: () => void;\n onSpeakingEnd?: () => void;\n onKeepSession?: (prop: { granted: boolean }) => void;\n onError?: (prop: { message: string; endConversation: boolean; type: string }) => void;\n onMicrophoneError?: (prop: { message: string }) => void;\n onMicrophoneStatusChange?: (prop: { status: \"ON\" | \"OFF\" | \"PROCESSING\" }) => void;\n onMicrophoneSpeechRecognitionResult?: (prop: { transcript: string }) => void;\n};\n\nexport type BridgeOptions = {\n webClientUrl?: string;\n};\n\nexport type UseConversationResult = {\n webViewRef: React.RefObject<any>;\n webViewProps: Record<string, any>;\n initialized: boolean;\n startSession: () => void;\n endSession: () => void;\n sendMessage: (text: string) => void;\n toggleMicrophone: () => void;\n toggleMute: () => void;\n keepSession: () => void;\n};\n\ntype BridgeMessage = {\n type: string;\n payload?: any;\n};\n\nconst DEFAULT_WEB_CLIENT_URL =\n \"https://unpkg.com/@unith-ai/core-client/dist/lib.module.js\";\n\nfunction buildHtml(webClientUrl: string) {\n return `<!doctype html>\n<html>\n <head>\n <meta charset=\"utf-8\" />\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id=\"root\"></div>\n <script type=\"module\">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => {\n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === \"INIT\") {\n try {\n const { Conversation } = await import(\"${webClientUrl}\");\n\n if (!(\"VideoDecoder\" in window)) {\n send(\"ERROR\", {\n message: \"WebCodecs VideoDecoder is not supported on this device.\",\n type: \"modal\",\n endConversation: true,\n });\n return;\n }\n\n const element = document.getElementById(\"root\");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send(\"STATUS_CHANGE\", data),\n onConnect: (data) => send(\"CONNECT\", data),\n onDisconnect: (data) => send(\"DISCONNECT\", data),\n onMessage: (data) => send(\"MESSAGE\", data),\n onSuggestions: (data) => send(\"SUGGESTIONS\", data),\n onSpeakingStart: () => send(\"SPEAKING_START\"),\n onSpeakingEnd: () => 
send(\"SPEAKING_END\"),\n onTimeoutWarning: () => send(\"TIMEOUT_WARNING\"),\n onTimeout: () => send(\"TIMEOUT\"),\n onKeepSession: (data) => send(\"KEEP_SESSION\", data),\n onMuteStatusChange: (data) => send(\"MUTE_STATUS\", data),\n onError: (data) => send(\"ERROR\", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send(\"MIC_ERROR\", data),\n onMicrophoneStatusChange: (data) => send(\"MIC_STATUS\", data),\n onMicrophoneSpeechRecognitionResult: (data) => send(\"MIC_TRANSCRIPT\", data),\n },\n });\n\n ready = true;\n send(\"READY\");\n\n while (queue.length > 0) {\n const next = queue.shift();\n await applyMessage(next);\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to initialize conversation\",\n type: \"modal\",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n switch (type) {\n case \"START_SESSION\":\n await conversation.startSession();\n break;\n case \"END_SESSION\":\n await conversation.endSession();\n break;\n case \"SEND_MESSAGE\":\n await conversation.sendMessage(payload?.text || \"\");\n break;\n case \"TOGGLE_MIC\":\n await conversation.toggleMicrophone();\n break;\n case \"TOGGLE_MUTE\":\n await conversation.toggleMute();\n break;\n case \"KEEP_SESSION\":\n await conversation.keepSession();\n break;\n default:\n break;\n }\n };\n\n const handleIncoming = (event) => {\n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== \"INIT\") {\n queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (_) {}\n };\n\n window.addEventListener(\"message\", handleIncoming);\n document.addEventListener(\"message\", handleIncoming);\n </script>\n </body>\n</html>`;\n}\n\nfunction useBridge(\n options: ConversationOptions,\n events: ConversationEvents = {},\n bridge: BridgeOptions = {}\n): UseConversationResult {\n const webViewRef = useRef<any>(null);\n const [initialized, setInitialized] = useState(false);\n\n const html = useMemo(\n () => buildHtml(bridge.webClientUrl || DEFAULT_WEB_CLIENT_URL),\n [bridge.webClientUrl]\n );\n\n const post = useCallback((type: string, payload?: any) => {\n const message: BridgeMessage = { type, payload };\n webViewRef.current?.postMessage(JSON.stringify(message));\n }, []);\n\n const init = useCallback(() => {\n if (initialized) return;\n post(\"INIT\", {\n ...options,\n });\n setInitialized(true);\n }, [initialized, options, post]);\n\n const onMessage = useCallback(\n (event: WebViewMessageEvent) => {\n let data: BridgeMessage | null = null;\n try {\n data = JSON.parse(event.nativeEvent.data);\n } catch (_) {\n return;\n }\n if (!data) return;\n\n switch (data.type) {\n case \"STATUS_CHANGE\":\n events.onStatusChange?.(data.payload);\n break;\n case \"CONNECT\":\n events.onConnect?.(data.payload);\n break;\n case \"DISCONNECT\":\n events.onDisconnect?.(data.payload);\n break;\n case \"MESSAGE\":\n events.onMessage?.(data.payload);\n break;\n case \"SUGGESTIONS\":\n events.onSuggestions?.(data.payload);\n break;\n case \"SPEAKING_START\":\n events.onSpeakingStart?.();\n break;\n case \"SPEAKING_END\":\n events.onSpeakingEnd?.();\n break;\n case \"TIMEOUT_WARNING\":\n events.onTimeoutWarning?.();\n break;\n case \"TIMEOUT\":\n events.onTimeout?.();\n break;\n case \"KEEP_SESSION\":\n events.onKeepSession?.(data.payload);\n break;\n case \"MUTE_STATUS\":\n events.onMuteStatusChange?.(data.payload);\n break;\n case \"MIC_STATUS\":\n events.onMicrophoneStatusChange?.(data.payload);\n break;\n case \"MIC_TRANSCRIPT\":\n 
events.onMicrophoneSpeechRecognitionResult?.(data.payload);\n break;\n case \"MIC_ERROR\":\n events.onMicrophoneError?.(data.payload);\n break;\n case \"ERROR\":\n events.onError?.(data.payload);\n break;\n default:\n break;\n }\n },\n [events]\n );\n\n return {\n webViewRef,\n webViewProps: {\n source: { html },\n onMessage,\n onLoadEnd: init,\n javaScriptEnabled: true,\n mediaPlaybackRequiresUserAction: true,\n allowsInlineMediaPlayback: true,\n },\n initialized,\n startSession: () => post(\"START_SESSION\"),\n endSession: () => post(\"END_SESSION\"),\n sendMessage: (text: string) => post(\"SEND_MESSAGE\", { text }),\n toggleMicrophone: () => post(\"TOGGLE_MIC\"),\n toggleMute: () => post(\"TOGGLE_MUTE\"),\n keepSession: () => post(\"KEEP_SESSION\"),\n };\n}\n\nexport function useConversation(\n options: ConversationOptions,\n events?: ConversationEvents,\n bridge?: BridgeOptions\n): UseConversationResult {\n return useBridge(options, events, bridge);\n}\n\nexport type UnithConversationViewProps = ConversationEvents & {\n options: ConversationOptions;\n style?: ViewStyle;\n webviewProps?: Record<string, any>;\n webClientUrl?: string;\n};\n\nexport function UnithConversationView({\n options,\n style,\n webviewProps,\n webClientUrl,\n ...events\n}: UnithConversationViewProps) {\n const convo = useConversation(options, events, { webClientUrl });\n\n return (\n <WebView\n ref={convo.webViewRef}\n {...convo.webViewProps}\n {...webviewProps}\n style={style}\n />\n );\n}\n"],"names":["_excluded","useConversation","options","events","bridge","webViewRef","useRef","_useState","useState","initialized","setInitialized","html","useMemo","webClientUrl","post","useCallback","type","payload","_webViewRef$current","current","postMessage","JSON","stringify","init","_extends","onMessage","event","data","parse","nativeEvent","_","onStatusChange","onConnect","onDisconnect","onSuggestions","onSpeakingStart","onSpeakingEnd","onTimeoutWarning","onTimeout","onKeepSession","onMuteStatusChange","onMicrophoneStatusChange","onMicrophoneSpeechRecognitionResult","onMicrophoneError","onError","webViewProps","source","onLoadEnd","javaScriptEnabled","mediaPlaybackRequiresUserAction","allowsInlineMediaPlayback","startSession","endSession","sendMessage","text","toggleMicrophone","toggleMute","keepSession","useBridge","_ref","style","webviewProps","convo","_objectWithoutPropertiesLoose","React","WebView","ref"],"mappings":"iXAAA,IAAAA,EAAA,CAAA,UAAA,QAAA,eAAA,yBAqSgBC,EACdC,EACAC,EACAC,GAEA,OAlHF,SACEF,EACAC,EACAC,QADAD,IAAAA,IAAAA,EAA6B,CAAE,YAC/BC,IAAAA,EAAwB,IAExB,IAAMC,EAAaC,EAAAA,OAAY,MAC/BC,EAAsCC,EAAQA,UAAC,GAAxCC,EAAWF,EAAA,GAAEG,EAAcH,KAE5BI,EAAOC,EAAAA,QACX,WAAA,MArIF,s5BAqIkBR,EAAOS,cAxIzB,8DA6HF,+nGAWkE,EAC9D,CAACT,EAAOS,eAGJC,EAAOC,cAAY,SAACC,EAAcC,GAAiB,IAAAC,EAErC,OAAlBA,EAAAb,EAAWc,UAAXD,EAAoBE,YAAYC,KAAKC,UADN,CAAEN,KAAAA,EAAMC,QAAAA,IAEzC,EAAG,IAEGM,EAAOR,EAAWA,YAAC,WACnBN,IACJK,EAAK,OAAMU,EACNtB,GAAAA,IAELQ,GAAe,GACjB,EAAG,CAACD,EAAaP,EAASY,IAEpBW,EAAYV,cAChB,SAACW,GACC,IAAIC,EAA6B,KACjC,IACEA,EAAON,KAAKO,MAAMF,EAAMG,YAAYF,KACtC,CAAE,MAAOG,GACP,MACF,CACA,GAAKH,EAEL,OAAQA,EAAKX,MACX,IAAK,gBACkB,MAArBb,EAAO4B,gBAAP5B,EAAO4B,eAAiBJ,EAAKV,SAC7B,MACF,IAAK,UACa,MAAhBd,EAAO6B,WAAP7B,EAAO6B,UAAYL,EAAKV,SACxB,MACF,IAAK,aACgB,MAAnBd,EAAO8B,cAAP9B,EAAO8B,aAAeN,EAAKV,SAC3B,MACF,IAAK,UACa,MAAhBd,EAAOsB,WAAPtB,EAAOsB,UAAYE,EAAKV,SACxB,MACF,IAAK,cACHd,MAAAA,EAAO+B,eAAP/B,EAAO+B,cAAgBP,EAAKV,SAC5B,MACF,IAAK,iBACHd,MAAAA,EAAOgC,iBAAPhC,EAAOgC,kBACP,MACF,IAAK,eACHhC,MAAAA,EAAOiC,eAAPjC,EAAOiC,gBACP,MACF,IAAK,wBACHjC,EAAOkC
,kBAAPlC,EAAOkC,mBACP,MACF,IAAK,gBACHlC,EAAOmC,WAAPnC,EAAOmC,YACP,MACF,IAAK,qBACHnC,EAAOoC,eAAPpC,EAAOoC,cAAgBZ,EAAKV,SAC5B,MACF,IAAK,cACsB,MAAzBd,EAAOqC,oBAAPrC,EAAOqC,mBAAqBb,EAAKV,SACjC,MACF,IAAK,aAC4B,MAA/Bd,EAAOsC,0BAAPtC,EAAOsC,yBAA2Bd,EAAKV,SACvC,MACF,IAAK,iBACuC,MAA1Cd,EAAOuC,qCAAPvC,EAAOuC,oCAAsCf,EAAKV,SAClD,MACF,IAAK,YACqB,MAAxBd,EAAOwC,mBAAPxC,EAAOwC,kBAAoBhB,EAAKV,SAChC,MACF,IAAK,QACHd,MAAAA,EAAOyC,SAAPzC,EAAOyC,QAAUjB,EAAKV,SAK5B,EACA,CAACd,IAGH,MAAO,CACLE,WAAAA,EACAwC,aAAc,CACZC,OAAQ,CAAEnC,KAAAA,GACVc,UAAAA,EACAsB,UAAWxB,EACXyB,mBAAmB,EACnBC,iCAAiC,EACjCC,2BAA2B,GAE7BzC,YAAAA,EACA0C,aAAc,WAAM,OAAArC,EAAK,gBAAgB,EACzCsC,WAAY,kBAAMtC,EAAK,cAAc,EACrCuC,YAAa,SAACC,GAAY,OAAKxC,EAAK,eAAgB,CAAEwC,KAAAA,GAAO,EAC7DC,iBAAkB,WAAM,OAAAzC,EAAK,aAAa,EAC1C0C,WAAY,kBAAM1C,EAAK,cAAc,EACrC2C,YAAa,WAAM,OAAA3C,EAAK,eAAe,EAE3C,CAOS4C,CAAUxD,EAASC,EAAQC,EACpC,wCASqCuD,OAEnCC,EAAKD,EAALC,MACAC,EAAYF,EAAZE,aACAhD,EAAY8C,EAAZ9C,aAGMiD,EAAQ7D,EANP0D,EAAPzD,mJAIS6D,CAAAJ,EAAA3D,GAEsC,CAAEa,aAAAA,iBAEjD,OACEmD,wBAACC,EAAOA,QAAAzC,EAAA,CACN0C,IAAKJ,EAAMzD,YACPyD,EAAMjB,aACNgB,EAAY,CAChBD,MAAOA,IAGb"}
+ {"version":3,"file":"lib.js","sources":["../src/index.tsx"],"sourcesContent":["import React, { useCallback, useMemo, useRef, useState } from \"react\";\nimport { ViewStyle } from \"react-native\";\nimport { WebView, WebViewMessageEvent } from \"react-native-webview\";\n\nexport type ConversationOptions = {\n orgId: string;\n headId: string;\n apiKey: string;\n environment?: string;\n mode?: string;\n language?: string;\n username?: string;\n allowWakeLock?: boolean;\n fadeTransitionsType?: string;\n microphoneProvider?: \"azure\" | \"eleven_labs\" | \"custom\";\n};\n\nexport type ConversationEvents = {\n onStatusChange?: (prop: { status: string }) => void;\n onConnect?: (prop: { userId: string; headInfo: any; microphoneAccess: boolean }) => void;\n onDisconnect?: (prop: any) => void;\n onMessage?: (prop: any) => void;\n onSuggestions?: (prop: { suggestions: string[] }) => void;\n onTimeoutWarning?: () => void;\n onTimeout?: () => void;\n onMuteStatusChange?: (prop: { isMuted: boolean }) => void;\n onSpeakingStart?: () => void;\n onSpeakingEnd?: () => void;\n onKeepSession?: (prop: { granted: boolean }) => void;\n onError?: (prop: { message: string; endConversation: boolean; type: string }) => void;\n onMicrophoneError?: (prop: { message: string }) => void;\n onMicrophoneStatusChange?: (prop: { status: \"ON\" | \"OFF\" | \"PROCESSING\" }) => void;\n onMicrophoneSpeechRecognitionResult?: (prop: { transcript: string }) => void;\n};\n\nexport type BridgeOptions = {\n webClientUrl?: string;\n};\n\nexport type UseConversationResult = {\n webViewRef: React.RefObject<any>;\n webViewProps: Record<string, any>;\n initialized: boolean;\n startSession: () => void;\n endSession: () => void;\n sendMessage: (text: string) => void;\n toggleMicrophone: () => void;\n toggleMute: () => void;\n keepSession: () => void;\n};\n\ntype BridgeMessage = {\n type: string;\n payload?: any;\n};\n\nconst DEFAULT_WEB_CLIENT_URL =\n \"https://unpkg.com/@unith-ai/core-client/dist/lib.module.js\";\n\nfunction buildHtml(webClientUrl: string) {\n return `<!doctype html>\n<html>\n <head>\n <meta charset=\"utf-8\" />\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id=\"root\"></div>\n <script type=\"module\">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => {\n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === \"INIT\") {\n try {\n const { Conversation } = await import(\"${webClientUrl}\");\n\n if (!(\"VideoDecoder\" in window)) {\n send(\"ERROR\", {\n message: \"WebCodecs VideoDecoder is not supported on this device.\",\n type: \"modal\",\n endConversation: true,\n });\n return;\n }\n\n const element = document.getElementById(\"root\");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send(\"STATUS_CHANGE\", data),\n onConnect: (data) => send(\"CONNECT\", data),\n onDisconnect: (data) => send(\"DISCONNECT\", data),\n onMessage: (data) => send(\"MESSAGE\", data),\n onSuggestions: (data) => send(\"SUGGESTIONS\", data),\n onSpeakingStart: () => send(\"SPEAKING_START\"),\n onSpeakingEnd: () => 
send(\"SPEAKING_END\"),\n onTimeoutWarning: () => send(\"TIMEOUT_WARNING\"),\n onTimeout: () => send(\"TIMEOUT\"),\n onKeepSession: (data) => send(\"KEEP_SESSION\", data),\n onMuteStatusChange: (data) => send(\"MUTE_STATUS\", data),\n onError: (data) => send(\"ERROR\", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send(\"MIC_ERROR\", data),\n onMicrophoneStatusChange: (data) => send(\"MIC_STATUS\", data),\n onMicrophoneSpeechRecognitionResult: (data) => send(\"MIC_TRANSCRIPT\", data),\n },\n });\n\n ready = true;\n send(\"READY\");\n\n while (queue.length > 0) {\n const next = queue.shift();\n try {\n await applyMessage(next);\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to process queued message\",\n type: \"notification\",\n endConversation: false,\n });\n }\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to initialize conversation\",\n type: \"modal\",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n try {\n switch (type) {\n case \"START_SESSION\":\n await conversation.startSession();\n break;\n case \"END_SESSION\":\n await conversation.endSession();\n break;\n case \"SEND_MESSAGE\":\n await conversation.sendMessage(payload?.text || \"\");\n break;\n case \"TOGGLE_MIC\":\n await conversation.toggleMicrophone();\n break;\n case \"TOGGLE_MUTE\":\n await conversation.toggleMute();\n break;\n case \"KEEP_SESSION\":\n await conversation.keepSession();\n break;\n default:\n break;\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Operation failed\",\n type: \"notification\",\n endConversation: false,\n });\n }\n };\n\n const handleIncoming = (event) => {\n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== \"INIT\") {\n queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (error) {\n console.error(\"Failed to handle incoming message:\", error);\n }\n };\n\n window.addEventListener(\"message\", handleIncoming);\n document.addEventListener(\"message\", handleIncoming);\n </script>\n </body>\n</html>`;\n}\n\nfunction useBridge(\n options: ConversationOptions,\n events: ConversationEvents = {},\n bridge: BridgeOptions = {}\n): UseConversationResult {\n const webViewRef = useRef<any>(null);\n const [initialized, setInitialized] = useState(false);\n\n const html = useMemo(\n () => buildHtml(bridge.webClientUrl || DEFAULT_WEB_CLIENT_URL),\n [bridge.webClientUrl]\n );\n\n const post = useCallback((type: string, payload?: any) => {\n const message: BridgeMessage = { type, payload };\n webViewRef.current?.postMessage(JSON.stringify(message));\n }, []);\n\n const init = useCallback(() => {\n if (initialized) return;\n console.log(\"Initializing conversation bridge with options:\", options);\n post(\"INIT\", {\n ...options,\n });\n setInitialized(true);\n }, [initialized, options, post]);\n\n const onMessage = useCallback(\n (event: WebViewMessageEvent) => {\n let data: BridgeMessage | null = null;\n try {\n data = JSON.parse(event.nativeEvent.data);\n } catch (_) {\n return;\n }\n if (!data) return;\n\n switch (data.type) {\n case \"STATUS_CHANGE\":\n events.onStatusChange?.(data.payload);\n break;\n case \"CONNECT\":\n events.onConnect?.(data.payload);\n break;\n case \"DISCONNECT\":\n events.onDisconnect?.(data.payload);\n break;\n case \"MESSAGE\":\n events.onMessage?.(data.payload);\n break;\n case \"SUGGESTIONS\":\n events.onSuggestions?.(data.payload);\n break;\n case \"SPEAKING_START\":\n 
events.onSpeakingStart?.();\n break;\n case \"SPEAKING_END\":\n events.onSpeakingEnd?.();\n break;\n case \"TIMEOUT_WARNING\":\n events.onTimeoutWarning?.();\n break;\n case \"TIMEOUT\":\n events.onTimeout?.();\n break;\n case \"KEEP_SESSION\":\n events.onKeepSession?.(data.payload);\n break;\n case \"MUTE_STATUS\":\n events.onMuteStatusChange?.(data.payload);\n break;\n case \"MIC_STATUS\":\n events.onMicrophoneStatusChange?.(data.payload);\n break;\n case \"MIC_TRANSCRIPT\":\n events.onMicrophoneSpeechRecognitionResult?.(data.payload);\n break;\n case \"MIC_ERROR\":\n events.onMicrophoneError?.(data.payload);\n break;\n case \"ERROR\":\n events.onError?.(data.payload);\n break;\n default:\n break;\n }\n },\n [events]\n );\n\n return {\n webViewRef,\n webViewProps: {\n source: { html },\n onMessage,\n onLoadEnd: () => {\n console.log(\"WebView loaded, initializing bridge...\");\n init();\n },\n javaScriptEnabled: true,\n mediaPlaybackRequiresUserAction: false,\n allowsInlineMediaPlayback: true,\n allowsFileAccess: true,\n domStorageEnabled: true,\n mediaCapturePermissionGrantType: 'grant',\n },\n initialized,\n startSession: () => post(\"START_SESSION\"),\n endSession: () => post(\"END_SESSION\"),\n sendMessage: (text: string) => post(\"SEND_MESSAGE\", { text }),\n toggleMicrophone: () => post(\"TOGGLE_MIC\"),\n toggleMute: () => post(\"TOGGLE_MUTE\"),\n keepSession: () => post(\"KEEP_SESSION\"),\n };\n}\n\nexport function useConversation(\n options: ConversationOptions,\n events?: ConversationEvents,\n bridge?: BridgeOptions\n): UseConversationResult {\n return useBridge(options, events, bridge);\n}\n\nexport type UnithConversationViewProps = ConversationEvents & {\n options: ConversationOptions;\n style?: ViewStyle;\n webviewProps?: Record<string, any>;\n webClientUrl?: string;\n};\n\nexport function UnithConversationView({\n options,\n style,\n webviewProps,\n webClientUrl,\n ...events\n}: UnithConversationViewProps) {\n const convo = useConversation(options, events, { webClientUrl });\n\n return (\n <WebView\n ref={convo.webViewRef}\n {...convo.webViewProps}\n {...webviewProps}\n style={style}\n />\n 
);\n}"],"names":["useConversation","options","events","bridge","webViewRef","useRef","_useState","useState","initialized","setInitialized","html","useMemo","webClientUrl","post","useCallback","type","payload","_webViewRef$current","current","postMessage","JSON","stringify","init","console","log","_extends","onMessage","event","data","parse","nativeEvent","_","onStatusChange","onConnect","onDisconnect","onSuggestions","onSpeakingStart","onSpeakingEnd","onTimeoutWarning","onTimeout","onKeepSession","onMuteStatusChange","onMicrophoneStatusChange","onMicrophoneSpeechRecognitionResult","onMicrophoneError","onError","webViewProps","source","onLoadEnd","javaScriptEnabled","mediaPlaybackRequiresUserAction","allowsInlineMediaPlayback","allowsFileAccess","domStorageEnabled","mediaCapturePermissionGrantType","startSession","endSession","sendMessage","text","toggleMicrophone","toggleMute","keepSession","useBridge","_ref","style","webviewProps","convo","_objectWithoutPropertiesLoose","_excluded","React","WebView","ref"],"mappings":"kbA8TgBA,EACdC,EACAC,EACAC,GAEA,OAzHF,SACEF,EACAC,EACAC,QADAD,IAAAA,IAAAA,EAA6B,CAAE,YAC/BC,IAAAA,EAAwB,IAExB,IAAMC,EAAaC,EAAAA,OAAY,MAC/BC,EAAsCC,YAAS,GAAxCC,EAAWF,EAAEG,GAAAA,EAAcH,EAAA,GAE5BI,EAAOC,EAAAA,QACX,u6BAAgBR,EAAOS,cA1JzB,8DAgC+D,wwHA0HC,EAC9D,CAACT,EAAOS,eAGJC,EAAOC,EAAAA,YAAY,SAACC,EAAcC,GAAiB,IAAAC,EAEvDA,OAAAA,EAAAb,EAAWc,UAAXD,EAAoBE,YAAYC,KAAKC,UADN,CAAEN,KAAAA,EAAMC,QAAAA,IAEzC,EAAG,IAEGM,EAAOR,EAAAA,YAAY,WACnBN,IACJe,QAAQC,IAAI,iDAAkDvB,GAC9DY,EAAK,OAAMY,EAAA,CAAA,EACNxB,IAELQ,GAAe,GACjB,EAAG,CAACD,EAAaP,EAASY,IAEpBa,EAAYZ,EAAWA,YAC3B,SAACa,GACC,IAAIC,EAA6B,KACjC,IACEA,EAAOR,KAAKS,MAAMF,EAAMG,YAAYF,KACtC,CAAE,MAAOG,GACP,MACF,CACA,GAAKH,EAEL,OAAQA,EAAKb,MACX,IAAK,gBACkB,MAArBb,EAAO8B,gBAAP9B,EAAO8B,eAAiBJ,EAAKZ,SAC7B,MACF,IAAK,UACHd,MAAAA,EAAO+B,WAAP/B,EAAO+B,UAAYL,EAAKZ,SACxB,MACF,IAAK,mBACHd,EAAOgC,cAAPhC,EAAOgC,aAAeN,EAAKZ,SAC3B,MACF,IAAK,UACa,MAAhBd,EAAOwB,WAAPxB,EAAOwB,UAAYE,EAAKZ,SACxB,MACF,IAAK,cACHd,MAAAA,EAAOiC,eAAPjC,EAAOiC,cAAgBP,EAAKZ,SAC5B,MACF,IAAK,uBACHd,EAAOkC,iBAAPlC,EAAOkC,kBACP,MACF,IAAK,eACiB,MAApBlC,EAAOmC,eAAPnC,EAAOmC,gBACP,MACF,IAAK,kBACHnC,MAAAA,EAAOoC,kBAAPpC,EAAOoC,mBACP,MACF,IAAK,UACa,MAAhBpC,EAAOqC,WAAPrC,EAAOqC,YACP,MACF,IAAK,eACHrC,MAAAA,EAAOsC,eAAPtC,EAAOsC,cAAgBZ,EAAKZ,SAC5B,MACF,IAAK,oBACHd,EAAOuC,oBAAPvC,EAAOuC,mBAAqBb,EAAKZ,SACjC,MACF,IAAK,aAC4B,MAA/Bd,EAAOwC,0BAAPxC,EAAOwC,yBAA2Bd,EAAKZ,SACvC,MACF,IAAK,iBACHd,MAAAA,EAAOyC,qCAAPzC,EAAOyC,oCAAsCf,EAAKZ,SAClD,MACF,IAAK,kBACHd,EAAO0C,mBAAP1C,EAAO0C,kBAAoBhB,EAAKZ,SAChC,MACF,IAAK,QACW,MAAdd,EAAO2C,SAAP3C,EAAO2C,QAAUjB,EAAKZ,SAK5B,EACA,CAACd,IAGH,MAAO,CACLE,WAAAA,EACA0C,aAAc,CACZC,OAAQ,CAAErC,KAAAA,GACVgB,UAAAA,EACAsB,UAAW,WACTzB,QAAQC,IAAI,0CACZF,GACF,EACA2B,mBAAmB,EACnBC,iCAAiC,EACjCC,2BAA2B,EAC3BC,kBAAkB,EAClBC,mBAAmB,EACnBC,gCAAiC,SAEnC9C,YAAAA,EACA+C,aAAc,WAAF,OAAQ1C,EAAK,gBAAgB,EACzC2C,WAAY,kBAAM3C,EAAK,cAAc,EACrC4C,YAAa,SAACC,GAAY,OAAK7C,EAAK,eAAgB,CAAE6C,KAAAA,GAAO,EAC7DC,iBAAkB,kBAAM9C,EAAK,aAAa,EAC1C+C,WAAY,WAAM,OAAA/C,EAAK,cAAc,EACrCgD,YAAa,WAAF,OAAQhD,EAAK,eAAe,EAE3C,CAOSiD,CAAU7D,EAASC,EAAQC,EACpC,+BASgB,SAAqB4D,GACnC,IACAC,EAAKD,EAALC,MACAC,EAAYF,EAAZE,aACArD,EAAYmD,EAAZnD,aAGMsD,EAAQlE,EANP+D,EAAP9D,mJAISkE,CAAAJ,EAAAK,GAEsC,CAAExD,aAAAA,iBAEjD,OACEyD,wBAACC,UAAO7C,GACN8C,IAAKL,EAAM9D,YACP8D,EAAMpB,aACNmB,GACJD,MAAOA,IAGb"}
package/dist/lib.modern.mjs CHANGED
@@ -1,2 +1,2 @@
- import n,{useRef as e,useState as a,useMemo as t,useCallback as o}from"react";import{WebView as s}from"react-native-webview";function i(){return i=Object.assign?Object.assign.bind():function(n){for(var e=1;e<arguments.length;e++){var a=arguments[e];for(var t in a)({}).hasOwnProperty.call(a,t)&&(n[t]=a[t])}return n},i.apply(null,arguments)}const r=["options","style","webviewProps","webClientUrl"];function l(n,s,r){return function(n,s={},r={}){const l=e(null),[d,c]=a(!1),S=t(()=>`<!doctype html>\n<html>\n <head>\n <meta charset="utf-8" />\n <meta name="viewport" content="width=device-width, initial-scale=1" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id="root"></div>\n <script type="module">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => {\n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === "INIT") {\n try {\n const { Conversation } = await import("${r.webClientUrl||"https://unpkg.com/@unith-ai/core-client/dist/lib.module.js"}");\n\n if (!("VideoDecoder" in window)) {\n send("ERROR", {\n message: "WebCodecs VideoDecoder is not supported on this device.",\n type: "modal",\n endConversation: true,\n });\n return;\n }\n\n const element = document.getElementById("root");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send("STATUS_CHANGE", data),\n onConnect: (data) => send("CONNECT", data),\n onDisconnect: (data) => send("DISCONNECT", data),\n onMessage: (data) => send("MESSAGE", data),\n onSuggestions: (data) => send("SUGGESTIONS", data),\n onSpeakingStart: () => send("SPEAKING_START"),\n onSpeakingEnd: () => send("SPEAKING_END"),\n onTimeoutWarning: () => send("TIMEOUT_WARNING"),\n onTimeout: () => send("TIMEOUT"),\n onKeepSession: (data) => send("KEEP_SESSION", data),\n onMuteStatusChange: (data) => send("MUTE_STATUS", data),\n onError: (data) => send("ERROR", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send("MIC_ERROR", data),\n onMicrophoneStatusChange: (data) => send("MIC_STATUS", data),\n onMicrophoneSpeechRecognitionResult: (data) => send("MIC_TRANSCRIPT", data),\n },\n });\n\n ready = true;\n send("READY");\n\n while (queue.length > 0) {\n const next = queue.shift();\n await applyMessage(next);\n }\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Failed to initialize conversation",\n type: "modal",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n switch (type) {\n case "START_SESSION":\n await conversation.startSession();\n break;\n case "END_SESSION":\n await conversation.endSession();\n break;\n case "SEND_MESSAGE":\n await conversation.sendMessage(payload?.text || "");\n break;\n case "TOGGLE_MIC":\n await conversation.toggleMicrophone();\n break;\n case "TOGGLE_MUTE":\n await conversation.toggleMute();\n break;\n case "KEEP_SESSION":\n await conversation.keepSession();\n break;\n default:\n break;\n }\n };\n\n const handleIncoming = (event) => {\n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== "INIT") {\n queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (_) {}\n };\n\n 
window.addEventListener("message", handleIncoming);\n document.addEventListener("message", handleIncoming);\n <\/script>\n </body>\n</html>`,[r.webClientUrl]),u=o((n,e)=>{var a;null==(a=l.current)||a.postMessage(JSON.stringify({type:n,payload:e}))},[]),p=o(()=>{d||(u("INIT",i({},n)),c(!0))},[d,n,u]),E=o(n=>{let e=null;try{e=JSON.parse(n.nativeEvent.data)}catch(n){return}if(e)switch(e.type){case"STATUS_CHANGE":null==s.onStatusChange||s.onStatusChange(e.payload);break;case"CONNECT":null==s.onConnect||s.onConnect(e.payload);break;case"DISCONNECT":null==s.onDisconnect||s.onDisconnect(e.payload);break;case"MESSAGE":null==s.onMessage||s.onMessage(e.payload);break;case"SUGGESTIONS":null==s.onSuggestions||s.onSuggestions(e.payload);break;case"SPEAKING_START":null==s.onSpeakingStart||s.onSpeakingStart();break;case"SPEAKING_END":null==s.onSpeakingEnd||s.onSpeakingEnd();break;case"TIMEOUT_WARNING":null==s.onTimeoutWarning||s.onTimeoutWarning();break;case"TIMEOUT":null==s.onTimeout||s.onTimeout();break;case"KEEP_SESSION":null==s.onKeepSession||s.onKeepSession(e.payload);break;case"MUTE_STATUS":null==s.onMuteStatusChange||s.onMuteStatusChange(e.payload);break;case"MIC_STATUS":null==s.onMicrophoneStatusChange||s.onMicrophoneStatusChange(e.payload);break;case"MIC_TRANSCRIPT":null==s.onMicrophoneSpeechRecognitionResult||s.onMicrophoneSpeechRecognitionResult(e.payload);break;case"MIC_ERROR":null==s.onMicrophoneError||s.onMicrophoneError(e.payload);break;case"ERROR":null==s.onError||s.onError(e.payload)}},[s]);return{webViewRef:l,webViewProps:{source:{html:S},onMessage:E,onLoadEnd:p,javaScriptEnabled:!0,mediaPlaybackRequiresUserAction:!0,allowsInlineMediaPlayback:!0},initialized:d,startSession:()=>u("START_SESSION"),endSession:()=>u("END_SESSION"),sendMessage:n=>u("SEND_MESSAGE",{text:n}),toggleMicrophone:()=>u("TOGGLE_MIC"),toggleMute:()=>u("TOGGLE_MUTE"),keepSession:()=>u("KEEP_SESSION")}}(n,s,r)}function d(e){let{options:a,style:t,webviewProps:o,webClientUrl:d}=e;const c=l(a,function(n,e){if(null==n)return{};var a={};for(var t in n)if({}.hasOwnProperty.call(n,t)){if(-1!==e.indexOf(t))continue;a[t]=n[t]}return a}(e,r),{webClientUrl:d});/*#__PURE__*/return n.createElement(s,i({ref:c.webViewRef},c.webViewProps,o,{style:t}))}export{d as UnithConversationView,l as useConversation};
+ import n,{useRef as e,useState as a,useMemo as t,useCallback as o}from"react";import{WebView as s}from"react-native-webview";function i(){return i=Object.assign?Object.assign.bind():function(n){for(var e=1;e<arguments.length;e++){var a=arguments[e];for(var t in a)({}).hasOwnProperty.call(a,t)&&(n[t]=a[t])}return n},i.apply(null,arguments)}const r=["options","style","webviewProps","webClientUrl"];function l(n,s,r){return function(n,s={},r={}){const l=e(null),[d,c]=a(!1),S=t(()=>`<!doctype html>\n<html>\n <head>\n <meta charset="utf-8" />\n <meta name="viewport" content="width=device-width, initial-scale=1" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id="root"></div>\n <script type="module">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => {\n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === "INIT") {\n try {\n const { Conversation } = await import("${r.webClientUrl||"https://unpkg.com/@unith-ai/core-client/dist/lib.module.js"}");\n\n if (!("VideoDecoder" in window)) {\n send("ERROR", {\n message: "WebCodecs VideoDecoder is not supported on this device.",\n type: "modal",\n endConversation: true,\n });\n return;\n }\n\n const element = document.getElementById("root");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send("STATUS_CHANGE", data),\n onConnect: (data) => send("CONNECT", data),\n onDisconnect: (data) => send("DISCONNECT", data),\n onMessage: (data) => send("MESSAGE", data),\n onSuggestions: (data) => send("SUGGESTIONS", data),\n onSpeakingStart: () => send("SPEAKING_START"),\n onSpeakingEnd: () => send("SPEAKING_END"),\n onTimeoutWarning: () => send("TIMEOUT_WARNING"),\n onTimeout: () => send("TIMEOUT"),\n onKeepSession: (data) => send("KEEP_SESSION", data),\n onMuteStatusChange: (data) => send("MUTE_STATUS", data),\n onError: (data) => send("ERROR", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send("MIC_ERROR", data),\n onMicrophoneStatusChange: (data) => send("MIC_STATUS", data),\n onMicrophoneSpeechRecognitionResult: (data) => send("MIC_TRANSCRIPT", data),\n },\n });\n\n ready = true;\n send("READY");\n\n while (queue.length > 0) {\n const next = queue.shift();\n try {\n await applyMessage(next);\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Failed to process queued message",\n type: "notification",\n endConversation: false,\n });\n }\n }\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Failed to initialize conversation",\n type: "modal",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n try {\n switch (type) {\n case "START_SESSION":\n await conversation.startSession();\n break;\n case "END_SESSION":\n await conversation.endSession();\n break;\n case "SEND_MESSAGE":\n await conversation.sendMessage(payload?.text || "");\n break;\n case "TOGGLE_MIC":\n await conversation.toggleMicrophone();\n break;\n case "TOGGLE_MUTE":\n await conversation.toggleMute();\n break;\n case "KEEP_SESSION":\n await conversation.keepSession();\n break;\n default:\n break;\n }\n } catch (error) {\n send("ERROR", {\n message: 
error?.message || "Operation failed",\n type: "notification",\n endConversation: false,\n });\n }\n };\n\n const handleIncoming = (event) => {\n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== "INIT") {\n queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (error) {\n console.error("Failed to handle incoming message:", error);\n }\n };\n\n window.addEventListener("message", handleIncoming);\n document.addEventListener("message", handleIncoming);\n <\/script>\n </body>\n</html>`,[r.webClientUrl]),p=o((n,e)=>{var a;null==(a=l.current)||a.postMessage(JSON.stringify({type:n,payload:e}))},[]),u=o(()=>{d||(console.log("Initializing conversation bridge with options:",n),p("INIT",i({},n)),c(!0))},[d,n,p]),g=o(n=>{let e=null;try{e=JSON.parse(n.nativeEvent.data)}catch(n){return}if(e)switch(e.type){case"STATUS_CHANGE":null==s.onStatusChange||s.onStatusChange(e.payload);break;case"CONNECT":null==s.onConnect||s.onConnect(e.payload);break;case"DISCONNECT":null==s.onDisconnect||s.onDisconnect(e.payload);break;case"MESSAGE":null==s.onMessage||s.onMessage(e.payload);break;case"SUGGESTIONS":null==s.onSuggestions||s.onSuggestions(e.payload);break;case"SPEAKING_START":null==s.onSpeakingStart||s.onSpeakingStart();break;case"SPEAKING_END":null==s.onSpeakingEnd||s.onSpeakingEnd();break;case"TIMEOUT_WARNING":null==s.onTimeoutWarning||s.onTimeoutWarning();break;case"TIMEOUT":null==s.onTimeout||s.onTimeout();break;case"KEEP_SESSION":null==s.onKeepSession||s.onKeepSession(e.payload);break;case"MUTE_STATUS":null==s.onMuteStatusChange||s.onMuteStatusChange(e.payload);break;case"MIC_STATUS":null==s.onMicrophoneStatusChange||s.onMicrophoneStatusChange(e.payload);break;case"MIC_TRANSCRIPT":null==s.onMicrophoneSpeechRecognitionResult||s.onMicrophoneSpeechRecognitionResult(e.payload);break;case"MIC_ERROR":null==s.onMicrophoneError||s.onMicrophoneError(e.payload);break;case"ERROR":null==s.onError||s.onError(e.payload)}},[s]);return{webViewRef:l,webViewProps:{source:{html:S},onMessage:g,onLoadEnd:()=>{console.log("WebView loaded, initializing bridge..."),u()},javaScriptEnabled:!0,mediaPlaybackRequiresUserAction:!1,allowsInlineMediaPlayback:!0,allowsFileAccess:!0,domStorageEnabled:!0,mediaCapturePermissionGrantType:"grant"},initialized:d,startSession:()=>p("START_SESSION"),endSession:()=>p("END_SESSION"),sendMessage:n=>p("SEND_MESSAGE",{text:n}),toggleMicrophone:()=>p("TOGGLE_MIC"),toggleMute:()=>p("TOGGLE_MUTE"),keepSession:()=>p("KEEP_SESSION")}}(n,s,r)}function d(e){let{options:a,style:t,webviewProps:o,webClientUrl:d}=e;const c=l(a,function(n,e){if(null==n)return{};var a={};for(var t in n)if({}.hasOwnProperty.call(n,t)){if(-1!==e.indexOf(t))continue;a[t]=n[t]}return a}(e,r),{webClientUrl:d});/*#__PURE__*/return n.createElement(s,i({ref:c.webViewRef},c.webViewProps,o,{style:t}))}export{d as UnithConversationView,l as useConversation};
  //# sourceMappingURL=lib.modern.mjs.map
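Both builds ship the same postMessage protocol between the hook and the injected page. Decoded from the embedded source, the string constants below are the package's own; the union types wrapping them are ours, added for readability:

    // Commands the React Native hook posts into the WebView page
    type BridgeCommand =
      | "INIT" | "START_SESSION" | "END_SESSION"
      | "SEND_MESSAGE" | "TOGGLE_MIC" | "TOGGLE_MUTE" | "KEEP_SESSION";

    // Events the page posts back via window.ReactNativeWebView.postMessage
    type BridgeEvent =
      | "READY" | "STATUS_CHANGE" | "CONNECT" | "DISCONNECT" | "MESSAGE"
      | "SUGGESTIONS" | "SPEAKING_START" | "SPEAKING_END" | "TIMEOUT_WARNING"
      | "TIMEOUT" | "KEEP_SESSION" | "MUTE_STATUS" | "MIC_STATUS"
      | "MIC_TRANSCRIPT" | "MIC_ERROR" | "ERROR";

    // Every frame crossing the bridge is JSON of this shape
    type BridgeMessage = { type: BridgeCommand | BridgeEvent; payload?: any };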
package/dist/lib.modern.mjs.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"lib.modern.mjs","sources":["../src/index.tsx"],"sourcesContent":["import React, { useCallback, useMemo, useRef, useState } from \"react\";\nimport { ViewStyle } from \"react-native\";\nimport { WebView, WebViewMessageEvent } from \"react-native-webview\";\n\nexport type ConversationOptions = {\n orgId: string;\n headId: string;\n apiKey: string;\n environment?: string; \n mode?: string;\n language?: string;\n username?: string;\n allowWakeLock?: boolean;\n fadeTransitionsType?: string;\n microphoneProvider?: \"azure\" | \"eleven_labs\" | \"custom\";\n};\n\nexport type ConversationEvents = {\n onStatusChange?: (prop: { status: string }) => void;\n onConnect?: (prop: { userId: string; headInfo: any; microphoneAccess: boolean }) => void;\n onDisconnect?: (prop: any) => void;\n onMessage?: (prop: any) => void;\n onSuggestions?: (prop: { suggestions: string[] }) => void;\n onTimeoutWarning?: () => void;\n onTimeout?: () => void;\n onMuteStatusChange?: (prop: { isMuted: boolean }) => void;\n onSpeakingStart?: () => void;\n onSpeakingEnd?: () => void;\n onKeepSession?: (prop: { granted: boolean }) => void;\n onError?: (prop: { message: string; endConversation: boolean; type: string }) => void;\n onMicrophoneError?: (prop: { message: string }) => void;\n onMicrophoneStatusChange?: (prop: { status: \"ON\" | \"OFF\" | \"PROCESSING\" }) => void;\n onMicrophoneSpeechRecognitionResult?: (prop: { transcript: string }) => void;\n};\n\nexport type BridgeOptions = {\n webClientUrl?: string;\n};\n\nexport type UseConversationResult = {\n webViewRef: React.RefObject<any>;\n webViewProps: Record<string, any>;\n initialized: boolean;\n startSession: () => void;\n endSession: () => void;\n sendMessage: (text: string) => void;\n toggleMicrophone: () => void;\n toggleMute: () => void;\n keepSession: () => void;\n};\n\ntype BridgeMessage = {\n type: string;\n payload?: any;\n};\n\nconst DEFAULT_WEB_CLIENT_URL =\n \"https://unpkg.com/@unith-ai/core-client/dist/lib.module.js\";\n\nfunction buildHtml(webClientUrl: string) {\n return `<!doctype html>\n<html>\n <head>\n <meta charset=\"utf-8\" />\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id=\"root\"></div>\n <script type=\"module\">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => {\n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === \"INIT\") {\n try {\n const { Conversation } = await import(\"${webClientUrl}\");\n\n if (!(\"VideoDecoder\" in window)) {\n send(\"ERROR\", {\n message: \"WebCodecs VideoDecoder is not supported on this device.\",\n type: \"modal\",\n endConversation: true,\n });\n return;\n }\n\n const element = document.getElementById(\"root\");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send(\"STATUS_CHANGE\", data),\n onConnect: (data) => send(\"CONNECT\", data),\n onDisconnect: (data) => send(\"DISCONNECT\", data),\n onMessage: (data) => send(\"MESSAGE\", data),\n onSuggestions: (data) => send(\"SUGGESTIONS\", data),\n onSpeakingStart: () => send(\"SPEAKING_START\"),\n onSpeakingEnd: 
() => send(\"SPEAKING_END\"),\n onTimeoutWarning: () => send(\"TIMEOUT_WARNING\"),\n onTimeout: () => send(\"TIMEOUT\"),\n onKeepSession: (data) => send(\"KEEP_SESSION\", data),\n onMuteStatusChange: (data) => send(\"MUTE_STATUS\", data),\n onError: (data) => send(\"ERROR\", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send(\"MIC_ERROR\", data),\n onMicrophoneStatusChange: (data) => send(\"MIC_STATUS\", data),\n onMicrophoneSpeechRecognitionResult: (data) => send(\"MIC_TRANSCRIPT\", data),\n },\n });\n\n ready = true;\n send(\"READY\");\n\n while (queue.length > 0) {\n const next = queue.shift();\n await applyMessage(next);\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to initialize conversation\",\n type: \"modal\",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n switch (type) {\n case \"START_SESSION\":\n await conversation.startSession();\n break;\n case \"END_SESSION\":\n await conversation.endSession();\n break;\n case \"SEND_MESSAGE\":\n await conversation.sendMessage(payload?.text || \"\");\n break;\n case \"TOGGLE_MIC\":\n await conversation.toggleMicrophone();\n break;\n case \"TOGGLE_MUTE\":\n await conversation.toggleMute();\n break;\n case \"KEEP_SESSION\":\n await conversation.keepSession();\n break;\n default:\n break;\n }\n };\n\n const handleIncoming = (event) => {\n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== \"INIT\") {\n queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (_) {}\n };\n\n window.addEventListener(\"message\", handleIncoming);\n document.addEventListener(\"message\", handleIncoming);\n </script>\n </body>\n</html>`;\n}\n\nfunction useBridge(\n options: ConversationOptions,\n events: ConversationEvents = {},\n bridge: BridgeOptions = {}\n): UseConversationResult {\n const webViewRef = useRef<any>(null);\n const [initialized, setInitialized] = useState(false);\n\n const html = useMemo(\n () => buildHtml(bridge.webClientUrl || DEFAULT_WEB_CLIENT_URL),\n [bridge.webClientUrl]\n );\n\n const post = useCallback((type: string, payload?: any) => {\n const message: BridgeMessage = { type, payload };\n webViewRef.current?.postMessage(JSON.stringify(message));\n }, []);\n\n const init = useCallback(() => {\n if (initialized) return;\n post(\"INIT\", {\n ...options,\n });\n setInitialized(true);\n }, [initialized, options, post]);\n\n const onMessage = useCallback(\n (event: WebViewMessageEvent) => {\n let data: BridgeMessage | null = null;\n try {\n data = JSON.parse(event.nativeEvent.data);\n } catch (_) {\n return;\n }\n if (!data) return;\n\n switch (data.type) {\n case \"STATUS_CHANGE\":\n events.onStatusChange?.(data.payload);\n break;\n case \"CONNECT\":\n events.onConnect?.(data.payload);\n break;\n case \"DISCONNECT\":\n events.onDisconnect?.(data.payload);\n break;\n case \"MESSAGE\":\n events.onMessage?.(data.payload);\n break;\n case \"SUGGESTIONS\":\n events.onSuggestions?.(data.payload);\n break;\n case \"SPEAKING_START\":\n events.onSpeakingStart?.();\n break;\n case \"SPEAKING_END\":\n events.onSpeakingEnd?.();\n break;\n case \"TIMEOUT_WARNING\":\n events.onTimeoutWarning?.();\n break;\n case \"TIMEOUT\":\n events.onTimeout?.();\n break;\n case \"KEEP_SESSION\":\n events.onKeepSession?.(data.payload);\n break;\n case \"MUTE_STATUS\":\n events.onMuteStatusChange?.(data.payload);\n break;\n case \"MIC_STATUS\":\n events.onMicrophoneStatusChange?.(data.payload);\n break;\n case \"MIC_TRANSCRIPT\":\n 
events.onMicrophoneSpeechRecognitionResult?.(data.payload);\n break;\n case \"MIC_ERROR\":\n events.onMicrophoneError?.(data.payload);\n break;\n case \"ERROR\":\n events.onError?.(data.payload);\n break;\n default:\n break;\n }\n },\n [events]\n );\n\n return {\n webViewRef,\n webViewProps: {\n source: { html },\n onMessage,\n onLoadEnd: init,\n javaScriptEnabled: true,\n mediaPlaybackRequiresUserAction: true,\n allowsInlineMediaPlayback: true,\n },\n initialized,\n startSession: () => post(\"START_SESSION\"),\n endSession: () => post(\"END_SESSION\"),\n sendMessage: (text: string) => post(\"SEND_MESSAGE\", { text }),\n toggleMicrophone: () => post(\"TOGGLE_MIC\"),\n toggleMute: () => post(\"TOGGLE_MUTE\"),\n keepSession: () => post(\"KEEP_SESSION\"),\n };\n}\n\nexport function useConversation(\n options: ConversationOptions,\n events?: ConversationEvents,\n bridge?: BridgeOptions\n): UseConversationResult {\n return useBridge(options, events, bridge);\n}\n\nexport type UnithConversationViewProps = ConversationEvents & {\n options: ConversationOptions;\n style?: ViewStyle;\n webviewProps?: Record<string, any>;\n webClientUrl?: string;\n};\n\nexport function UnithConversationView({\n options,\n style,\n webviewProps,\n webClientUrl,\n ...events\n}: UnithConversationViewProps) {\n const convo = useConversation(options, events, { webClientUrl });\n\n return (\n <WebView\n ref={convo.webViewRef}\n {...convo.webViewProps}\n {...webviewProps}\n style={style}\n />\n );\n}\n"],"names":["_excluded","useConversation","options","events","bridge","webViewRef","useRef","initialized","setInitialized","useState","html","useMemo","webClientUrl","post","useCallback","type","payload","_webViewRef$current","current","postMessage","JSON","stringify","init","_extends","onMessage","event","data","parse","nativeEvent","_","onStatusChange","onConnect","onDisconnect","onSuggestions","onSpeakingStart","onSpeakingEnd","onTimeoutWarning","onTimeout","onKeepSession","onMuteStatusChange","onMicrophoneStatusChange","onMicrophoneSpeechRecognitionResult","onMicrophoneError","onError","webViewProps","source","onLoadEnd","javaScriptEnabled","mediaPlaybackRequiresUserAction","allowsInlineMediaPlayback","startSession","endSession","sendMessage","text","toggleMicrophone","toggleMute","keepSession","useBridge","UnithConversationView","_ref","style","webviewProps","convo","_objectWithoutPropertiesLoose","React","WebView","ref"],"mappings":"qVAAA,MAAAA,EAAA,CAAA,UAAA,QAAA,eAAA,gBAqSgB,SAAAC,EACdC,EACAC,EACAC,GAEA,OAlHF,SACEF,EACAC,EAA6B,CAAA,EAC7BC,EAAwB,CAAE,GAE1B,MAAMC,EAAaC,EAAY,OACxBC,EAAaC,GAAkBC,GAAS,GAEzCC,EAAOC,EACX,IArIK,q5BAqIWP,EAAOQ,cAxIzB,4rGAyIE,CAACR,EAAOQ,eAGJC,EAAOC,EAAY,CAACC,EAAcC,KAAiB,IAAAC,EAErC,OAAlBA,EAAAZ,EAAWa,UAAXD,EAAoBE,YAAYC,KAAKC,UADN,CAAEN,OAAMC,cAEtC,IAEGM,EAAOR,EAAY,KACnBP,IACJM,EAAK,OAAMU,EACNrB,CAAAA,EAAAA,IAELM,GAAe,KACd,CAACD,EAAaL,EAASW,IAEpBW,EAAYV,EACfW,IACC,IAAIC,EAA6B,KACjC,IACEA,EAAON,KAAKO,MAAMF,EAAMG,YAAYF,KACtC,CAAE,MAAOG,GACP,MACF,CACA,GAAKH,EAEL,OAAQA,EAAKX,MACX,IAAK,gBACHZ,MAAAA,EAAO2B,gBAAP3B,EAAO2B,eAAiBJ,EAAKV,SAC7B,MACF,IAAK,UACHb,MAAAA,EAAO4B,WAAP5B,EAAO4B,UAAYL,EAAKV,SACxB,MACF,IAAK,mBACHb,EAAO6B,cAAP7B,EAAO6B,aAAeN,EAAKV,SAC3B,MACF,IAAK,UACHb,MAAAA,EAAOqB,WAAPrB,EAAOqB,UAAYE,EAAKV,SACxB,MACF,IAAK,oBACHb,EAAO8B,eAAP9B,EAAO8B,cAAgBP,EAAKV,SAC5B,MACF,IAAK,iBACmB,MAAtBb,EAAO+B,iBAAP/B,EAAO+B,kBACP,MACF,IAAK,eACH/B,MAAAA,EAAOgC,eAAPhC,EAAOgC,gBACP,MACF,IAAK,wBACHhC,EAAOiC,kBAAPjC,EAAOiC,mBACP,MACF,IAAK,gBACHjC,EAAOkC,WAAPlC,EAAOkC,YACP,MACF,IAAK,eACiB,MAApBlC,EAAOmC,eAAPnC,EAAOm
C,cAAgBZ,EAAKV,SAC5B,MACF,IAAK,cACHb,MAAAA,EAAOoC,oBAAPpC,EAAOoC,mBAAqBb,EAAKV,SACjC,MACF,IAAK,aACHb,MAAAA,EAAOqC,0BAAPrC,EAAOqC,yBAA2Bd,EAAKV,SACvC,MACF,IAAK,uBACHb,EAAOsC,qCAAPtC,EAAOsC,oCAAsCf,EAAKV,SAClD,MACF,IAAK,YACHb,MAAAA,EAAOuC,mBAAPvC,EAAOuC,kBAAoBhB,EAAKV,SAChC,MACF,IAAK,cACHb,EAAOwC,SAAPxC,EAAOwC,QAAUjB,EAAKV,WAM5B,CAACb,IAGH,MAAO,CACLE,aACAuC,aAAc,CACZC,OAAQ,CAAEnC,QACVc,YACAsB,UAAWxB,EACXyB,mBAAmB,EACnBC,iCAAiC,EACjCC,2BAA2B,GAE7B1C,cACA2C,aAAcA,IAAMrC,EAAK,iBACzBsC,WAAYA,IAAMtC,EAAK,eACvBuC,YAAcC,GAAiBxC,EAAK,eAAgB,CAAEwC,SACtDC,iBAAkBA,IAAMzC,EAAK,cAC7B0C,WAAYA,IAAM1C,EAAK,eACvB2C,YAAaA,IAAM3C,EAAK,gBAE5B,CAOS4C,CAAUvD,EAASC,EAAQC,EACpC,CASgB,SAAAsD,EAAqBC,OAACzD,QACpCA,EAAO0D,MACPA,EAAKC,aACLA,EAAYjD,aACZA,GAE2B+C,EAC3B,MAAMG,EAAQ7D,EAAgBC,6IAFrB6D,CAAAJ,EAAA3D,GAEsC,CAAEY,8BAEjD,OACEoD,gBAACC,EAAO1C,EAAA,CACN2C,IAAKJ,EAAMzD,YACPyD,EAAMlB,aACNiB,EACJD,CAAAA,MAAOA,IAGb"}
1
+ {"version":3,"file":"lib.modern.mjs","sources":["../src/index.tsx"],"sourcesContent":["import React, { useCallback, useMemo, useRef, useState } from \"react\";\nimport { ViewStyle } from \"react-native\";\nimport { WebView, WebViewMessageEvent } from \"react-native-webview\";\n\nexport type ConversationOptions = {\n orgId: string;\n headId: string;\n apiKey: string;\n environment?: string;\n mode?: string;\n language?: string;\n username?: string;\n allowWakeLock?: boolean;\n fadeTransitionsType?: string;\n microphoneProvider?: \"azure\" | \"eleven_labs\" | \"custom\";\n};\n\nexport type ConversationEvents = {\n onStatusChange?: (prop: { status: string }) => void;\n onConnect?: (prop: { userId: string; headInfo: any; microphoneAccess: boolean }) => void;\n onDisconnect?: (prop: any) => void;\n onMessage?: (prop: any) => void;\n onSuggestions?: (prop: { suggestions: string[] }) => void;\n onTimeoutWarning?: () => void;\n onTimeout?: () => void;\n onMuteStatusChange?: (prop: { isMuted: boolean }) => void;\n onSpeakingStart?: () => void;\n onSpeakingEnd?: () => void;\n onKeepSession?: (prop: { granted: boolean }) => void;\n onError?: (prop: { message: string; endConversation: boolean; type: string }) => void;\n onMicrophoneError?: (prop: { message: string }) => void;\n onMicrophoneStatusChange?: (prop: { status: \"ON\" | \"OFF\" | \"PROCESSING\" }) => void;\n onMicrophoneSpeechRecognitionResult?: (prop: { transcript: string }) => void;\n};\n\nexport type BridgeOptions = {\n webClientUrl?: string;\n};\n\nexport type UseConversationResult = {\n webViewRef: React.RefObject<any>;\n webViewProps: Record<string, any>;\n initialized: boolean;\n startSession: () => void;\n endSession: () => void;\n sendMessage: (text: string) => void;\n toggleMicrophone: () => void;\n toggleMute: () => void;\n keepSession: () => void;\n};\n\ntype BridgeMessage = {\n type: string;\n payload?: any;\n};\n\nconst DEFAULT_WEB_CLIENT_URL =\n \"https://unpkg.com/@unith-ai/core-client/dist/lib.module.js\";\n\nfunction buildHtml(webClientUrl: string) {\n return `<!doctype html>\n<html>\n <head>\n <meta charset=\"utf-8\" />\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id=\"root\"></div>\n <script type=\"module\">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => {\n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === \"INIT\") {\n try {\n const { Conversation } = await import(\"${webClientUrl}\");\n\n if (!(\"VideoDecoder\" in window)) {\n send(\"ERROR\", {\n message: \"WebCodecs VideoDecoder is not supported on this device.\",\n type: \"modal\",\n endConversation: true,\n });\n return;\n }\n\n const element = document.getElementById(\"root\");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send(\"STATUS_CHANGE\", data),\n onConnect: (data) => send(\"CONNECT\", data),\n onDisconnect: (data) => send(\"DISCONNECT\", data),\n onMessage: (data) => send(\"MESSAGE\", data),\n onSuggestions: (data) => send(\"SUGGESTIONS\", data),\n onSpeakingStart: () => send(\"SPEAKING_START\"),\n onSpeakingEnd: () 
=> send(\"SPEAKING_END\"),\n onTimeoutWarning: () => send(\"TIMEOUT_WARNING\"),\n onTimeout: () => send(\"TIMEOUT\"),\n onKeepSession: (data) => send(\"KEEP_SESSION\", data),\n onMuteStatusChange: (data) => send(\"MUTE_STATUS\", data),\n onError: (data) => send(\"ERROR\", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send(\"MIC_ERROR\", data),\n onMicrophoneStatusChange: (data) => send(\"MIC_STATUS\", data),\n onMicrophoneSpeechRecognitionResult: (data) => send(\"MIC_TRANSCRIPT\", data),\n },\n });\n\n ready = true;\n send(\"READY\");\n\n while (queue.length > 0) {\n const next = queue.shift();\n try {\n await applyMessage(next);\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to process queued message\",\n type: \"notification\",\n endConversation: false,\n });\n }\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to initialize conversation\",\n type: \"modal\",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n try {\n switch (type) {\n case \"START_SESSION\":\n await conversation.startSession();\n break;\n case \"END_SESSION\":\n await conversation.endSession();\n break;\n case \"SEND_MESSAGE\":\n await conversation.sendMessage(payload?.text || \"\");\n break;\n case \"TOGGLE_MIC\":\n await conversation.toggleMicrophone();\n break;\n case \"TOGGLE_MUTE\":\n await conversation.toggleMute();\n break;\n case \"KEEP_SESSION\":\n await conversation.keepSession();\n break;\n default:\n break;\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Operation failed\",\n type: \"notification\",\n endConversation: false,\n });\n }\n };\n\n const handleIncoming = (event) => {\n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== \"INIT\") {\n queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (error) {\n console.error(\"Failed to handle incoming message:\", error);\n }\n };\n\n window.addEventListener(\"message\", handleIncoming);\n document.addEventListener(\"message\", handleIncoming);\n </script>\n </body>\n</html>`;\n}\n\nfunction useBridge(\n options: ConversationOptions,\n events: ConversationEvents = {},\n bridge: BridgeOptions = {}\n): UseConversationResult {\n const webViewRef = useRef<any>(null);\n const [initialized, setInitialized] = useState(false);\n\n const html = useMemo(\n () => buildHtml(bridge.webClientUrl || DEFAULT_WEB_CLIENT_URL),\n [bridge.webClientUrl]\n );\n\n const post = useCallback((type: string, payload?: any) => {\n const message: BridgeMessage = { type, payload };\n webViewRef.current?.postMessage(JSON.stringify(message));\n }, []);\n\n const init = useCallback(() => {\n if (initialized) return;\n console.log(\"Initializing conversation bridge with options:\", options);\n post(\"INIT\", {\n ...options,\n });\n setInitialized(true);\n }, [initialized, options, post]);\n\n const onMessage = useCallback(\n (event: WebViewMessageEvent) => {\n let data: BridgeMessage | null = null;\n try {\n data = JSON.parse(event.nativeEvent.data);\n } catch (_) {\n return;\n }\n if (!data) return;\n\n switch (data.type) {\n case \"STATUS_CHANGE\":\n events.onStatusChange?.(data.payload);\n break;\n case \"CONNECT\":\n events.onConnect?.(data.payload);\n break;\n case \"DISCONNECT\":\n events.onDisconnect?.(data.payload);\n break;\n case \"MESSAGE\":\n events.onMessage?.(data.payload);\n break;\n case \"SUGGESTIONS\":\n events.onSuggestions?.(data.payload);\n break;\n case \"SPEAKING_START\":\n 
events.onSpeakingStart?.();\n break;\n case \"SPEAKING_END\":\n events.onSpeakingEnd?.();\n break;\n case \"TIMEOUT_WARNING\":\n events.onTimeoutWarning?.();\n break;\n case \"TIMEOUT\":\n events.onTimeout?.();\n break;\n case \"KEEP_SESSION\":\n events.onKeepSession?.(data.payload);\n break;\n case \"MUTE_STATUS\":\n events.onMuteStatusChange?.(data.payload);\n break;\n case \"MIC_STATUS\":\n events.onMicrophoneStatusChange?.(data.payload);\n break;\n case \"MIC_TRANSCRIPT\":\n events.onMicrophoneSpeechRecognitionResult?.(data.payload);\n break;\n case \"MIC_ERROR\":\n events.onMicrophoneError?.(data.payload);\n break;\n case \"ERROR\":\n events.onError?.(data.payload);\n break;\n default:\n break;\n }\n },\n [events]\n );\n\n return {\n webViewRef,\n webViewProps: {\n source: { html },\n onMessage,\n onLoadEnd: () => {\n console.log(\"WebView loaded, initializing bridge...\");\n init();\n },\n javaScriptEnabled: true,\n mediaPlaybackRequiresUserAction: false,\n allowsInlineMediaPlayback: true,\n allowsFileAccess: true,\n domStorageEnabled: true,\n mediaCapturePermissionGrantType: 'grant',\n },\n initialized,\n startSession: () => post(\"START_SESSION\"),\n endSession: () => post(\"END_SESSION\"),\n sendMessage: (text: string) => post(\"SEND_MESSAGE\", { text }),\n toggleMicrophone: () => post(\"TOGGLE_MIC\"),\n toggleMute: () => post(\"TOGGLE_MUTE\"),\n keepSession: () => post(\"KEEP_SESSION\"),\n };\n}\n\nexport function useConversation(\n options: ConversationOptions,\n events?: ConversationEvents,\n bridge?: BridgeOptions\n): UseConversationResult {\n return useBridge(options, events, bridge);\n}\n\nexport type UnithConversationViewProps = ConversationEvents & {\n options: ConversationOptions;\n style?: ViewStyle;\n webviewProps?: Record<string, any>;\n webClientUrl?: string;\n};\n\nexport function UnithConversationView({\n options,\n style,\n webviewProps,\n webClientUrl,\n ...events\n}: UnithConversationViewProps) {\n const convo = useConversation(options, events, { webClientUrl });\n\n return (\n <WebView\n ref={convo.webViewRef}\n {...convo.webViewProps}\n {...webviewProps}\n style={style}\n />\n 
);\n}"],"names":["_excluded","useConversation","options","events","bridge","webViewRef","useRef","initialized","setInitialized","useState","html","useMemo","webClientUrl","post","useCallback","type","payload","_webViewRef$current","current","postMessage","JSON","stringify","init","console","log","_extends","onMessage","event","data","parse","nativeEvent","_","onStatusChange","onConnect","onDisconnect","onSuggestions","onSpeakingStart","onSpeakingEnd","onTimeoutWarning","onTimeout","onKeepSession","onMuteStatusChange","onMicrophoneStatusChange","onMicrophoneSpeechRecognitionResult","onMicrophoneError","onError","webViewProps","source","onLoadEnd","javaScriptEnabled","mediaPlaybackRequiresUserAction","allowsInlineMediaPlayback","allowsFileAccess","domStorageEnabled","mediaCapturePermissionGrantType","startSession","endSession","sendMessage","text","toggleMicrophone","toggleMute","keepSession","useBridge","UnithConversationView","_ref","style","webviewProps","convo","_objectWithoutPropertiesLoose","React","WebView","ref"],"mappings":"qVAAA,MAAAA,EAAA,CAAA,UAAA,QAAA,eAAA,gBA8TgB,SAAAC,EACdC,EACAC,EACAC,GAEA,OAzHF,SACEF,EACAC,EAA6B,CAAA,EAC7BC,EAAwB,IAExB,MAAMC,EAAaC,EAAY,OACxBC,EAAaC,GAAkBC,GAAS,GAEzCC,EAAOC,EACX,IAvJK,q5BAuJWP,EAAOQ,cA1JzB,q0HA2JE,CAACR,EAAOQ,eAGJC,EAAOC,EAAY,CAACC,EAAcC,KAAiBC,IAAAA,EAErC,OAAlBA,EAAAZ,EAAWa,UAAXD,EAAoBE,YAAYC,KAAKC,UADN,CAAEN,OAAMC,cAEtC,IAEGM,EAAOR,EAAY,KACnBP,IACJgB,QAAQC,IAAI,iDAAkDtB,GAC9DW,EAAK,OAAMY,EACNvB,GAAAA,IAELM,GAAe,KACd,CAACD,EAAaL,EAASW,IAEpBa,EAAYZ,EACfa,IACC,IAAIC,EAA6B,KACjC,IACEA,EAAOR,KAAKS,MAAMF,EAAMG,YAAYF,KACtC,CAAE,MAAOG,GACP,MACF,CACA,GAAKH,EAEL,OAAQA,EAAKb,MACX,IAAK,sBACHZ,EAAO6B,gBAAP7B,EAAO6B,eAAiBJ,EAAKZ,SAC7B,MACF,IAAK,UACHb,MAAAA,EAAO8B,WAAP9B,EAAO8B,UAAYL,EAAKZ,SACxB,MACF,IAAK,mBACHb,EAAO+B,cAAP/B,EAAO+B,aAAeN,EAAKZ,SAC3B,MACF,IAAK,UACa,MAAhBb,EAAOuB,WAAPvB,EAAOuB,UAAYE,EAAKZ,SACxB,MACF,IAAK,oBACHb,EAAOgC,eAAPhC,EAAOgC,cAAgBP,EAAKZ,SAC5B,MACF,IAAK,iBACHb,MAAAA,EAAOiC,iBAAPjC,EAAOiC,kBACP,MACF,IAAK,eACiB,MAApBjC,EAAOkC,eAAPlC,EAAOkC,gBACP,MACF,IAAK,kBACHlC,MAAAA,EAAOmC,kBAAPnC,EAAOmC,mBACP,MACF,IAAK,UACHnC,MAAAA,EAAOoC,WAAPpC,EAAOoC,YACP,MACF,IAAK,qBACHpC,EAAOqC,eAAPrC,EAAOqC,cAAgBZ,EAAKZ,SAC5B,MACF,IAAK,cACsB,MAAzBb,EAAOsC,oBAAPtC,EAAOsC,mBAAqBb,EAAKZ,SACjC,MACF,IAAK,aACHb,MAAAA,EAAOuC,0BAAPvC,EAAOuC,yBAA2Bd,EAAKZ,SACvC,MACF,IAAK,iBACuC,MAA1Cb,EAAOwC,qCAAPxC,EAAOwC,oCAAsCf,EAAKZ,SAClD,MACF,IAAK,kBACHb,EAAOyC,mBAAPzC,EAAOyC,kBAAoBhB,EAAKZ,SAChC,MACF,IAAK,QACHb,MAAAA,EAAO0C,SAAP1C,EAAO0C,QAAUjB,EAAKZ,WAM5B,CAACb,IAGH,MAAO,CACLE,aACAyC,aAAc,CACZC,OAAQ,CAAErC,QACVgB,YACAsB,UAAWA,KACTzB,QAAQC,IAAI,0CACZF,KAEF2B,mBAAmB,EACnBC,iCAAiC,EACjCC,2BAA2B,EAC3BC,kBAAkB,EAClBC,mBAAmB,EACnBC,gCAAiC,SAEnC/C,cACAgD,aAAcA,IAAM1C,EAAK,iBACzB2C,WAAYA,IAAM3C,EAAK,eACvB4C,YAAcC,GAAiB7C,EAAK,eAAgB,CAAE6C,SACtDC,iBAAkBA,IAAM9C,EAAK,cAC7B+C,WAAYA,IAAM/C,EAAK,eACvBgD,YAAaA,IAAMhD,EAAK,gBAE5B,CAOSiD,CAAU5D,EAASC,EAAQC,EACpC,UASgB2D,EAAqBC,GAAC,IAAA9D,QACpCA,EAAO+D,MACPA,EAAKC,aACLA,EAAYtD,aACZA,GAE2BoD,EAC3B,MAAMG,EAAQlE,EAAgBC,6IAFrBkE,CAAAJ,EAAAhE,GAEsC,CAAEY,8BAEjD,OACEyD,gBAACC,EAAO7C,GACN8C,IAAKJ,EAAM9D,YACP8D,EAAMrB,aACNoB,EAAY,CAChBD,MAAOA,IAGb"}
package/dist/lib.module.js CHANGED
@@ -1,2 +1,2 @@
1
- import n,{useRef as e,useState as a,useMemo as t,useCallback as o}from"react";import{WebView as i}from"react-native-webview";function r(){return r=Object.assign?Object.assign.bind():function(n){for(var e=1;e<arguments.length;e++){var a=arguments[e];for(var t in a)({}).hasOwnProperty.call(a,t)&&(n[t]=a[t])}return n},r.apply(null,arguments)}var s=["options","style","webviewProps","webClientUrl"];function d(n,i,s){return function(n,i,s){void 0===i&&(i={}),void 0===s&&(s={});var d=e(null),l=a(!1),c=l[0],u=l[1],S=t(function(){return'<!doctype html>\n<html>\n <head>\n <meta charset="utf-8" />\n <meta name="viewport" content="width=device-width, initial-scale=1" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id="root"></div>\n <script type="module">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => {\n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === "INIT") {\n try {\n const { Conversation } = await import("'+(s.webClientUrl||"https://unpkg.com/@unith-ai/core-client/dist/lib.module.js")+'");\n\n if (!("VideoDecoder" in window)) {\n send("ERROR", {\n message: "WebCodecs VideoDecoder is not supported on this device.",\n type: "modal",\n endConversation: true,\n });\n return;\n }\n\n const element = document.getElementById("root");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send("STATUS_CHANGE", data),\n onConnect: (data) => send("CONNECT", data),\n onDisconnect: (data) => send("DISCONNECT", data),\n onMessage: (data) => send("MESSAGE", data),\n onSuggestions: (data) => send("SUGGESTIONS", data),\n onSpeakingStart: () => send("SPEAKING_START"),\n onSpeakingEnd: () => send("SPEAKING_END"),\n onTimeoutWarning: () => send("TIMEOUT_WARNING"),\n onTimeout: () => send("TIMEOUT"),\n onKeepSession: (data) => send("KEEP_SESSION", data),\n onMuteStatusChange: (data) => send("MUTE_STATUS", data),\n onError: (data) => send("ERROR", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send("MIC_ERROR", data),\n onMicrophoneStatusChange: (data) => send("MIC_STATUS", data),\n onMicrophoneSpeechRecognitionResult: (data) => send("MIC_TRANSCRIPT", data),\n },\n });\n\n ready = true;\n send("READY");\n\n while (queue.length > 0) {\n const next = queue.shift();\n await applyMessage(next);\n }\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Failed to initialize conversation",\n type: "modal",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n switch (type) {\n case "START_SESSION":\n await conversation.startSession();\n break;\n case "END_SESSION":\n await conversation.endSession();\n break;\n case "SEND_MESSAGE":\n await conversation.sendMessage(payload?.text || "");\n break;\n case "TOGGLE_MIC":\n await conversation.toggleMicrophone();\n break;\n case "TOGGLE_MUTE":\n await conversation.toggleMute();\n break;\n case "KEEP_SESSION":\n await conversation.keepSession();\n break;\n default:\n break;\n }\n };\n\n const handleIncoming = (event) => {\n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== "INIT") {\n queue.push(msg);\n return;\n }\n 
applyMessage(msg);\n } catch (_) {}\n };\n\n window.addEventListener("message", handleIncoming);\n document.addEventListener("message", handleIncoming);\n <\/script>\n </body>\n</html>'},[s.webClientUrl]),p=o(function(n,e){var a;null==(a=d.current)||a.postMessage(JSON.stringify({type:n,payload:e}))},[]),E=o(function(){c||(p("INIT",r({},n)),u(!0))},[c,n,p]),g=o(function(n){var e=null;try{e=JSON.parse(n.nativeEvent.data)}catch(n){return}if(e)switch(e.type){case"STATUS_CHANGE":null==i.onStatusChange||i.onStatusChange(e.payload);break;case"CONNECT":null==i.onConnect||i.onConnect(e.payload);break;case"DISCONNECT":null==i.onDisconnect||i.onDisconnect(e.payload);break;case"MESSAGE":null==i.onMessage||i.onMessage(e.payload);break;case"SUGGESTIONS":null==i.onSuggestions||i.onSuggestions(e.payload);break;case"SPEAKING_START":null==i.onSpeakingStart||i.onSpeakingStart();break;case"SPEAKING_END":null==i.onSpeakingEnd||i.onSpeakingEnd();break;case"TIMEOUT_WARNING":null==i.onTimeoutWarning||i.onTimeoutWarning();break;case"TIMEOUT":null==i.onTimeout||i.onTimeout();break;case"KEEP_SESSION":null==i.onKeepSession||i.onKeepSession(e.payload);break;case"MUTE_STATUS":null==i.onMuteStatusChange||i.onMuteStatusChange(e.payload);break;case"MIC_STATUS":null==i.onMicrophoneStatusChange||i.onMicrophoneStatusChange(e.payload);break;case"MIC_TRANSCRIPT":null==i.onMicrophoneSpeechRecognitionResult||i.onMicrophoneSpeechRecognitionResult(e.payload);break;case"MIC_ERROR":null==i.onMicrophoneError||i.onMicrophoneError(e.payload);break;case"ERROR":null==i.onError||i.onError(e.payload)}},[i]);return{webViewRef:d,webViewProps:{source:{html:S},onMessage:g,onLoadEnd:E,javaScriptEnabled:!0,mediaPlaybackRequiresUserAction:!0,allowsInlineMediaPlayback:!0},initialized:c,startSession:function(){return p("START_SESSION")},endSession:function(){return p("END_SESSION")},sendMessage:function(n){return p("SEND_MESSAGE",{text:n})},toggleMicrophone:function(){return p("TOGGLE_MIC")},toggleMute:function(){return p("TOGGLE_MUTE")},keepSession:function(){return p("KEEP_SESSION")}}}(n,i,s)}function l(e){var a=e.style,t=e.webviewProps,o=e.webClientUrl,l=d(e.options,function(n,e){if(null==n)return{};var a={};for(var t in n)if({}.hasOwnProperty.call(n,t)){if(-1!==e.indexOf(t))continue;a[t]=n[t]}return a}(e,s),{webClientUrl:o});/*#__PURE__*/return n.createElement(i,r({ref:l.webViewRef},l.webViewProps,t,{style:a}))}export{l as UnithConversationView,d as useConversation};
1
+ import n,{useRef as e,useState as a,useMemo as t,useCallback as o}from"react";import{WebView as i}from"react-native-webview";function r(){return r=Object.assign?Object.assign.bind():function(n){for(var e=1;e<arguments.length;e++){var a=arguments[e];for(var t in a)({}).hasOwnProperty.call(a,t)&&(n[t]=a[t])}return n},r.apply(null,arguments)}var s=["options","style","webviewProps","webClientUrl"];function l(n,i,s){return function(n,i,s){void 0===i&&(i={}),void 0===s&&(s={});var l=e(null),d=a(!1),c=d[0],u=d[1],S=t(function(){return'<!doctype html>\n<html>\n <head>\n <meta charset="utf-8" />\n <meta name="viewport" content="width=device-width, initial-scale=1" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id="root"></div>\n <script type="module">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => {\n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === "INIT") {\n try {\n const { Conversation } = await import("'+(s.webClientUrl||"https://unpkg.com/@unith-ai/core-client/dist/lib.module.js")+'");\n\n if (!("VideoDecoder" in window)) {\n send("ERROR", {\n message: "WebCodecs VideoDecoder is not supported on this device.",\n type: "modal",\n endConversation: true,\n });\n return;\n }\n\n const element = document.getElementById("root");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send("STATUS_CHANGE", data),\n onConnect: (data) => send("CONNECT", data),\n onDisconnect: (data) => send("DISCONNECT", data),\n onMessage: (data) => send("MESSAGE", data),\n onSuggestions: (data) => send("SUGGESTIONS", data),\n onSpeakingStart: () => send("SPEAKING_START"),\n onSpeakingEnd: () => send("SPEAKING_END"),\n onTimeoutWarning: () => send("TIMEOUT_WARNING"),\n onTimeout: () => send("TIMEOUT"),\n onKeepSession: (data) => send("KEEP_SESSION", data),\n onMuteStatusChange: (data) => send("MUTE_STATUS", data),\n onError: (data) => send("ERROR", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send("MIC_ERROR", data),\n onMicrophoneStatusChange: (data) => send("MIC_STATUS", data),\n onMicrophoneSpeechRecognitionResult: (data) => send("MIC_TRANSCRIPT", data),\n },\n });\n\n ready = true;\n send("READY");\n\n while (queue.length > 0) {\n const next = queue.shift();\n try {\n await applyMessage(next);\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Failed to process queued message",\n type: "notification",\n endConversation: false,\n });\n }\n }\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Failed to initialize conversation",\n type: "modal",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n try {\n switch (type) {\n case "START_SESSION":\n await conversation.startSession();\n break;\n case "END_SESSION":\n await conversation.endSession();\n break;\n case "SEND_MESSAGE":\n await conversation.sendMessage(payload?.text || "");\n break;\n case "TOGGLE_MIC":\n await conversation.toggleMicrophone();\n break;\n case "TOGGLE_MUTE":\n await conversation.toggleMute();\n break;\n case "KEEP_SESSION":\n await conversation.keepSession();\n break;\n default:\n break;\n }\n } 
catch (error) {\n send("ERROR", {\n message: error?.message || "Operation failed",\n type: "notification",\n endConversation: false,\n });\n }\n };\n\n const handleIncoming = (event) => {\n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== "INIT") {\n queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (error) {\n console.error("Failed to handle incoming message:", error);\n }\n };\n\n window.addEventListener("message", handleIncoming);\n document.addEventListener("message", handleIncoming);\n <\/script>\n </body>\n</html>'},[s.webClientUrl]),p=o(function(n,e){var a;null==(a=l.current)||a.postMessage(JSON.stringify({type:n,payload:e}))},[]),g=o(function(){c||(console.log("Initializing conversation bridge with options:",n),p("INIT",r({},n)),u(!0))},[c,n,p]),E=o(function(n){var e=null;try{e=JSON.parse(n.nativeEvent.data)}catch(n){return}if(e)switch(e.type){case"STATUS_CHANGE":null==i.onStatusChange||i.onStatusChange(e.payload);break;case"CONNECT":null==i.onConnect||i.onConnect(e.payload);break;case"DISCONNECT":null==i.onDisconnect||i.onDisconnect(e.payload);break;case"MESSAGE":null==i.onMessage||i.onMessage(e.payload);break;case"SUGGESTIONS":null==i.onSuggestions||i.onSuggestions(e.payload);break;case"SPEAKING_START":null==i.onSpeakingStart||i.onSpeakingStart();break;case"SPEAKING_END":null==i.onSpeakingEnd||i.onSpeakingEnd();break;case"TIMEOUT_WARNING":null==i.onTimeoutWarning||i.onTimeoutWarning();break;case"TIMEOUT":null==i.onTimeout||i.onTimeout();break;case"KEEP_SESSION":null==i.onKeepSession||i.onKeepSession(e.payload);break;case"MUTE_STATUS":null==i.onMuteStatusChange||i.onMuteStatusChange(e.payload);break;case"MIC_STATUS":null==i.onMicrophoneStatusChange||i.onMicrophoneStatusChange(e.payload);break;case"MIC_TRANSCRIPT":null==i.onMicrophoneSpeechRecognitionResult||i.onMicrophoneSpeechRecognitionResult(e.payload);break;case"MIC_ERROR":null==i.onMicrophoneError||i.onMicrophoneError(e.payload);break;case"ERROR":null==i.onError||i.onError(e.payload)}},[i]);return{webViewRef:l,webViewProps:{source:{html:S},onMessage:E,onLoadEnd:function(){console.log("WebView loaded, initializing bridge..."),g()},javaScriptEnabled:!0,mediaPlaybackRequiresUserAction:!1,allowsInlineMediaPlayback:!0,allowsFileAccess:!0,domStorageEnabled:!0,mediaCapturePermissionGrantType:"grant"},initialized:c,startSession:function(){return p("START_SESSION")},endSession:function(){return p("END_SESSION")},sendMessage:function(n){return p("SEND_MESSAGE",{text:n})},toggleMicrophone:function(){return p("TOGGLE_MIC")},toggleMute:function(){return p("TOGGLE_MUTE")},keepSession:function(){return p("KEEP_SESSION")}}}(n,i,s)}function d(e){var a=e.style,t=e.webviewProps,o=e.webClientUrl,d=l(e.options,function(n,e){if(null==n)return{};var a={};for(var t in n)if({}.hasOwnProperty.call(n,t)){if(-1!==e.indexOf(t))continue;a[t]=n[t]}return a}(e,s),{webClientUrl:o});/*#__PURE__*/return n.createElement(i,r({ref:d.webViewRef},d.webViewProps,t,{style:a}))}export{d as UnithConversationView,l as useConversation};
2
2
  //# sourceMappingURL=lib.module.js.map
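Beyond the new error handling, this build shows 0.0.3 changing the WebView defaults: mediaPlaybackRequiresUserAction flips from true to false, and allowsFileAccess, domStorageEnabled, and mediaCapturePermissionGrantType: "grant" are added, presumably so the avatar can autoplay and the microphone can be captured without a user gesture. Because the component spreads the caller's webviewProps after the library's own webViewProps, an app should still be able to override any single default. A sketch of that override, assuming a react-native-webview version whose mediaCapturePermissionGrantType prop accepts "prompt":

import React from "react";
import { UnithConversationView } from "@unith-ai/react-native";

export function Screen() {
  return (
    <UnithConversationView
      // Placeholder credentials.
      options={{ orgId: "org_123", headId: "head_456", apiKey: "sk_example" }}
      // Caller props spread after the 0.0.3 defaults, so a stricter
      // permission policy can replace the library's "grant" default.
      webviewProps={{ mediaCapturePermissionGrantType: "prompt" }}
    />
  );
}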
@@ -1 +1 @@
1
- {"version":3,"file":"lib.module.js","sources":["../src/index.tsx"],"sourcesContent":["import React, { useCallback, useMemo, useRef, useState } from \"react\";\nimport { ViewStyle } from \"react-native\";\nimport { WebView, WebViewMessageEvent } from \"react-native-webview\";\n\nexport type ConversationOptions = {\n orgId: string;\n headId: string;\n apiKey: string;\n environment?: string; \n mode?: string;\n language?: string;\n username?: string;\n allowWakeLock?: boolean;\n fadeTransitionsType?: string;\n microphoneProvider?: \"azure\" | \"eleven_labs\" | \"custom\";\n};\n\nexport type ConversationEvents = {\n onStatusChange?: (prop: { status: string }) => void;\n onConnect?: (prop: { userId: string; headInfo: any; microphoneAccess: boolean }) => void;\n onDisconnect?: (prop: any) => void;\n onMessage?: (prop: any) => void;\n onSuggestions?: (prop: { suggestions: string[] }) => void;\n onTimeoutWarning?: () => void;\n onTimeout?: () => void;\n onMuteStatusChange?: (prop: { isMuted: boolean }) => void;\n onSpeakingStart?: () => void;\n onSpeakingEnd?: () => void;\n onKeepSession?: (prop: { granted: boolean }) => void;\n onError?: (prop: { message: string; endConversation: boolean; type: string }) => void;\n onMicrophoneError?: (prop: { message: string }) => void;\n onMicrophoneStatusChange?: (prop: { status: \"ON\" | \"OFF\" | \"PROCESSING\" }) => void;\n onMicrophoneSpeechRecognitionResult?: (prop: { transcript: string }) => void;\n};\n\nexport type BridgeOptions = {\n webClientUrl?: string;\n};\n\nexport type UseConversationResult = {\n webViewRef: React.RefObject<any>;\n webViewProps: Record<string, any>;\n initialized: boolean;\n startSession: () => void;\n endSession: () => void;\n sendMessage: (text: string) => void;\n toggleMicrophone: () => void;\n toggleMute: () => void;\n keepSession: () => void;\n};\n\ntype BridgeMessage = {\n type: string;\n payload?: any;\n};\n\nconst DEFAULT_WEB_CLIENT_URL =\n \"https://unpkg.com/@unith-ai/core-client/dist/lib.module.js\";\n\nfunction buildHtml(webClientUrl: string) {\n return `<!doctype html>\n<html>\n <head>\n <meta charset=\"utf-8\" />\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id=\"root\"></div>\n <script type=\"module\">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => {\n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === \"INIT\") {\n try {\n const { Conversation } = await import(\"${webClientUrl}\");\n\n if (!(\"VideoDecoder\" in window)) {\n send(\"ERROR\", {\n message: \"WebCodecs VideoDecoder is not supported on this device.\",\n type: \"modal\",\n endConversation: true,\n });\n return;\n }\n\n const element = document.getElementById(\"root\");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send(\"STATUS_CHANGE\", data),\n onConnect: (data) => send(\"CONNECT\", data),\n onDisconnect: (data) => send(\"DISCONNECT\", data),\n onMessage: (data) => send(\"MESSAGE\", data),\n onSuggestions: (data) => send(\"SUGGESTIONS\", data),\n onSpeakingStart: () => send(\"SPEAKING_START\"),\n onSpeakingEnd: () 
=> send(\"SPEAKING_END\"),\n onTimeoutWarning: () => send(\"TIMEOUT_WARNING\"),\n onTimeout: () => send(\"TIMEOUT\"),\n onKeepSession: (data) => send(\"KEEP_SESSION\", data),\n onMuteStatusChange: (data) => send(\"MUTE_STATUS\", data),\n onError: (data) => send(\"ERROR\", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send(\"MIC_ERROR\", data),\n onMicrophoneStatusChange: (data) => send(\"MIC_STATUS\", data),\n onMicrophoneSpeechRecognitionResult: (data) => send(\"MIC_TRANSCRIPT\", data),\n },\n });\n\n ready = true;\n send(\"READY\");\n\n while (queue.length > 0) {\n const next = queue.shift();\n await applyMessage(next);\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to initialize conversation\",\n type: \"modal\",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n switch (type) {\n case \"START_SESSION\":\n await conversation.startSession();\n break;\n case \"END_SESSION\":\n await conversation.endSession();\n break;\n case \"SEND_MESSAGE\":\n await conversation.sendMessage(payload?.text || \"\");\n break;\n case \"TOGGLE_MIC\":\n await conversation.toggleMicrophone();\n break;\n case \"TOGGLE_MUTE\":\n await conversation.toggleMute();\n break;\n case \"KEEP_SESSION\":\n await conversation.keepSession();\n break;\n default:\n break;\n }\n };\n\n const handleIncoming = (event) => {\n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== \"INIT\") {\n queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (_) {}\n };\n\n window.addEventListener(\"message\", handleIncoming);\n document.addEventListener(\"message\", handleIncoming);\n </script>\n </body>\n</html>`;\n}\n\nfunction useBridge(\n options: ConversationOptions,\n events: ConversationEvents = {},\n bridge: BridgeOptions = {}\n): UseConversationResult {\n const webViewRef = useRef<any>(null);\n const [initialized, setInitialized] = useState(false);\n\n const html = useMemo(\n () => buildHtml(bridge.webClientUrl || DEFAULT_WEB_CLIENT_URL),\n [bridge.webClientUrl]\n );\n\n const post = useCallback((type: string, payload?: any) => {\n const message: BridgeMessage = { type, payload };\n webViewRef.current?.postMessage(JSON.stringify(message));\n }, []);\n\n const init = useCallback(() => {\n if (initialized) return;\n post(\"INIT\", {\n ...options,\n });\n setInitialized(true);\n }, [initialized, options, post]);\n\n const onMessage = useCallback(\n (event: WebViewMessageEvent) => {\n let data: BridgeMessage | null = null;\n try {\n data = JSON.parse(event.nativeEvent.data);\n } catch (_) {\n return;\n }\n if (!data) return;\n\n switch (data.type) {\n case \"STATUS_CHANGE\":\n events.onStatusChange?.(data.payload);\n break;\n case \"CONNECT\":\n events.onConnect?.(data.payload);\n break;\n case \"DISCONNECT\":\n events.onDisconnect?.(data.payload);\n break;\n case \"MESSAGE\":\n events.onMessage?.(data.payload);\n break;\n case \"SUGGESTIONS\":\n events.onSuggestions?.(data.payload);\n break;\n case \"SPEAKING_START\":\n events.onSpeakingStart?.();\n break;\n case \"SPEAKING_END\":\n events.onSpeakingEnd?.();\n break;\n case \"TIMEOUT_WARNING\":\n events.onTimeoutWarning?.();\n break;\n case \"TIMEOUT\":\n events.onTimeout?.();\n break;\n case \"KEEP_SESSION\":\n events.onKeepSession?.(data.payload);\n break;\n case \"MUTE_STATUS\":\n events.onMuteStatusChange?.(data.payload);\n break;\n case \"MIC_STATUS\":\n events.onMicrophoneStatusChange?.(data.payload);\n break;\n case \"MIC_TRANSCRIPT\":\n 
events.onMicrophoneSpeechRecognitionResult?.(data.payload);\n break;\n case \"MIC_ERROR\":\n events.onMicrophoneError?.(data.payload);\n break;\n case \"ERROR\":\n events.onError?.(data.payload);\n break;\n default:\n break;\n }\n },\n [events]\n );\n\n return {\n webViewRef,\n webViewProps: {\n source: { html },\n onMessage,\n onLoadEnd: init,\n javaScriptEnabled: true,\n mediaPlaybackRequiresUserAction: true,\n allowsInlineMediaPlayback: true,\n },\n initialized,\n startSession: () => post(\"START_SESSION\"),\n endSession: () => post(\"END_SESSION\"),\n sendMessage: (text: string) => post(\"SEND_MESSAGE\", { text }),\n toggleMicrophone: () => post(\"TOGGLE_MIC\"),\n toggleMute: () => post(\"TOGGLE_MUTE\"),\n keepSession: () => post(\"KEEP_SESSION\"),\n };\n}\n\nexport function useConversation(\n options: ConversationOptions,\n events?: ConversationEvents,\n bridge?: BridgeOptions\n): UseConversationResult {\n return useBridge(options, events, bridge);\n}\n\nexport type UnithConversationViewProps = ConversationEvents & {\n options: ConversationOptions;\n style?: ViewStyle;\n webviewProps?: Record<string, any>;\n webClientUrl?: string;\n};\n\nexport function UnithConversationView({\n options,\n style,\n webviewProps,\n webClientUrl,\n ...events\n}: UnithConversationViewProps) {\n const convo = useConversation(options, events, { webClientUrl });\n\n return (\n <WebView\n ref={convo.webViewRef}\n {...convo.webViewProps}\n {...webviewProps}\n style={style}\n />\n );\n}\n"],"names":["_excluded","useConversation","options","events","bridge","webViewRef","useRef","_useState","useState","initialized","setInitialized","html","useMemo","webClientUrl","post","useCallback","type","payload","_webViewRef$current","current","postMessage","JSON","stringify","init","_extends","onMessage","event","data","parse","nativeEvent","_","onStatusChange","onConnect","onDisconnect","onSuggestions","onSpeakingStart","onSpeakingEnd","onTimeoutWarning","onTimeout","onKeepSession","onMuteStatusChange","onMicrophoneStatusChange","onMicrophoneSpeechRecognitionResult","onMicrophoneError","onError","webViewProps","source","onLoadEnd","javaScriptEnabled","mediaPlaybackRequiresUserAction","allowsInlineMediaPlayback","startSession","endSession","sendMessage","text","toggleMicrophone","toggleMute","keepSession","useBridge","UnithConversationView","_ref","style","webviewProps","convo","_objectWithoutPropertiesLoose","React","WebView","ref"],"mappings":"qVAAA,IAAAA,EAAA,CAAA,UAAA,QAAA,eAAA,yBAqSgBC,EACdC,EACAC,EACAC,GAEA,OAlHF,SACEF,EACAC,EACAC,QADAD,IAAAA,IAAAA,EAA6B,CAAE,YAC/BC,IAAAA,EAAwB,IAExB,IAAMC,EAAaC,EAAY,MAC/BC,EAAsCC,GAAS,GAAxCC,EAAWF,EAAA,GAAEG,EAAcH,KAE5BI,EAAOC,EACX,WAAA,MArIF,s5BAqIkBR,EAAOS,cAxIzB,8DA6HF,+nGAWkE,EAC9D,CAACT,EAAOS,eAGJC,EAAOC,EAAY,SAACC,EAAcC,GAAiB,IAAAC,EAErC,OAAlBA,EAAAb,EAAWc,UAAXD,EAAoBE,YAAYC,KAAKC,UADN,CAAEN,KAAAA,EAAMC,QAAAA,IAEzC,EAAG,IAEGM,EAAOR,EAAY,WACnBN,IACJK,EAAK,OAAMU,EACNtB,GAAAA,IAELQ,GAAe,GACjB,EAAG,CAACD,EAAaP,EAASY,IAEpBW,EAAYV,EAChB,SAACW,GACC,IAAIC,EAA6B,KACjC,IACEA,EAAON,KAAKO,MAAMF,EAAMG,YAAYF,KACtC,CAAE,MAAOG,GACP,MACF,CACA,GAAKH,EAEL,OAAQA,EAAKX,MACX,IAAK,gBACkB,MAArBb,EAAO4B,gBAAP5B,EAAO4B,eAAiBJ,EAAKV,SAC7B,MACF,IAAK,UACa,MAAhBd,EAAO6B,WAAP7B,EAAO6B,UAAYL,EAAKV,SACxB,MACF,IAAK,aACgB,MAAnBd,EAAO8B,cAAP9B,EAAO8B,aAAeN,EAAKV,SAC3B,MACF,IAAK,UACa,MAAhBd,EAAOsB,WAAPtB,EAAOsB,UAAYE,EAAKV,SACxB,MACF,IAAK,cACHd,MAAAA,EAAO+B,eAAP/B,EAAO+B,cAAgBP,EAAKV,SAC5B,MACF,IAAK,iBACHd,MAAAA,EAAOgC,iBAAPhC,EAAOgC,kBACP,MACF,IAAK,eACHhC,MAAAA,EAAOiC,eAAPjC,EAAOiC,gBACP,MACF,IAAK,wBACHjC,EAAOkC
,kBAAPlC,EAAOkC,mBACP,MACF,IAAK,gBACHlC,EAAOmC,WAAPnC,EAAOmC,YACP,MACF,IAAK,qBACHnC,EAAOoC,eAAPpC,EAAOoC,cAAgBZ,EAAKV,SAC5B,MACF,IAAK,cACsB,MAAzBd,EAAOqC,oBAAPrC,EAAOqC,mBAAqBb,EAAKV,SACjC,MACF,IAAK,aAC4B,MAA/Bd,EAAOsC,0BAAPtC,EAAOsC,yBAA2Bd,EAAKV,SACvC,MACF,IAAK,iBACuC,MAA1Cd,EAAOuC,qCAAPvC,EAAOuC,oCAAsCf,EAAKV,SAClD,MACF,IAAK,YACqB,MAAxBd,EAAOwC,mBAAPxC,EAAOwC,kBAAoBhB,EAAKV,SAChC,MACF,IAAK,QACHd,MAAAA,EAAOyC,SAAPzC,EAAOyC,QAAUjB,EAAKV,SAK5B,EACA,CAACd,IAGH,MAAO,CACLE,WAAAA,EACAwC,aAAc,CACZC,OAAQ,CAAEnC,KAAAA,GACVc,UAAAA,EACAsB,UAAWxB,EACXyB,mBAAmB,EACnBC,iCAAiC,EACjCC,2BAA2B,GAE7BzC,YAAAA,EACA0C,aAAc,WAAM,OAAArC,EAAK,gBAAgB,EACzCsC,WAAY,kBAAMtC,EAAK,cAAc,EACrCuC,YAAa,SAACC,GAAY,OAAKxC,EAAK,eAAgB,CAAEwC,KAAAA,GAAO,EAC7DC,iBAAkB,WAAM,OAAAzC,EAAK,aAAa,EAC1C0C,WAAY,kBAAM1C,EAAK,cAAc,EACrC2C,YAAa,WAAM,OAAA3C,EAAK,eAAe,EAE3C,CAOS4C,CAAUxD,EAASC,EAAQC,EACpC,UASgBuD,EAAqBC,OAEnCC,EAAKD,EAALC,MACAC,EAAYF,EAAZE,aACAjD,EAAY+C,EAAZ/C,aAGMkD,EAAQ9D,EANP2D,EAAP1D,mJAIS8D,CAAAJ,EAAA5D,GAEsC,CAAEa,aAAAA,iBAEjD,OACEoD,gBAACC,EAAO1C,EAAA,CACN2C,IAAKJ,EAAM1D,YACP0D,EAAMlB,aACNiB,EAAY,CAChBD,MAAOA,IAGb"}
1
+ {"version":3,"file":"lib.module.js","sources":["../src/index.tsx"],"sourcesContent":["import React, { useCallback, useMemo, useRef, useState } from \"react\";\nimport { ViewStyle } from \"react-native\";\nimport { WebView, WebViewMessageEvent } from \"react-native-webview\";\n\nexport type ConversationOptions = {\n orgId: string;\n headId: string;\n apiKey: string;\n environment?: string;\n mode?: string;\n language?: string;\n username?: string;\n allowWakeLock?: boolean;\n fadeTransitionsType?: string;\n microphoneProvider?: \"azure\" | \"eleven_labs\" | \"custom\";\n};\n\nexport type ConversationEvents = {\n onStatusChange?: (prop: { status: string }) => void;\n onConnect?: (prop: { userId: string; headInfo: any; microphoneAccess: boolean }) => void;\n onDisconnect?: (prop: any) => void;\n onMessage?: (prop: any) => void;\n onSuggestions?: (prop: { suggestions: string[] }) => void;\n onTimeoutWarning?: () => void;\n onTimeout?: () => void;\n onMuteStatusChange?: (prop: { isMuted: boolean }) => void;\n onSpeakingStart?: () => void;\n onSpeakingEnd?: () => void;\n onKeepSession?: (prop: { granted: boolean }) => void;\n onError?: (prop: { message: string; endConversation: boolean; type: string }) => void;\n onMicrophoneError?: (prop: { message: string }) => void;\n onMicrophoneStatusChange?: (prop: { status: \"ON\" | \"OFF\" | \"PROCESSING\" }) => void;\n onMicrophoneSpeechRecognitionResult?: (prop: { transcript: string }) => void;\n};\n\nexport type BridgeOptions = {\n webClientUrl?: string;\n};\n\nexport type UseConversationResult = {\n webViewRef: React.RefObject<any>;\n webViewProps: Record<string, any>;\n initialized: boolean;\n startSession: () => void;\n endSession: () => void;\n sendMessage: (text: string) => void;\n toggleMicrophone: () => void;\n toggleMute: () => void;\n keepSession: () => void;\n};\n\ntype BridgeMessage = {\n type: string;\n payload?: any;\n};\n\nconst DEFAULT_WEB_CLIENT_URL =\n \"https://unpkg.com/@unith-ai/core-client/dist/lib.module.js\";\n\nfunction buildHtml(webClientUrl: string) {\n return `<!doctype html>\n<html>\n <head>\n <meta charset=\"utf-8\" />\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id=\"root\"></div>\n <script type=\"module\">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => {\n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === \"INIT\") {\n try {\n const { Conversation } = await import(\"${webClientUrl}\");\n\n if (!(\"VideoDecoder\" in window)) {\n send(\"ERROR\", {\n message: \"WebCodecs VideoDecoder is not supported on this device.\",\n type: \"modal\",\n endConversation: true,\n });\n return;\n }\n\n const element = document.getElementById(\"root\");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send(\"STATUS_CHANGE\", data),\n onConnect: (data) => send(\"CONNECT\", data),\n onDisconnect: (data) => send(\"DISCONNECT\", data),\n onMessage: (data) => send(\"MESSAGE\", data),\n onSuggestions: (data) => send(\"SUGGESTIONS\", data),\n onSpeakingStart: () => send(\"SPEAKING_START\"),\n onSpeakingEnd: () 
=> send(\"SPEAKING_END\"),\n onTimeoutWarning: () => send(\"TIMEOUT_WARNING\"),\n onTimeout: () => send(\"TIMEOUT\"),\n onKeepSession: (data) => send(\"KEEP_SESSION\", data),\n onMuteStatusChange: (data) => send(\"MUTE_STATUS\", data),\n onError: (data) => send(\"ERROR\", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send(\"MIC_ERROR\", data),\n onMicrophoneStatusChange: (data) => send(\"MIC_STATUS\", data),\n onMicrophoneSpeechRecognitionResult: (data) => send(\"MIC_TRANSCRIPT\", data),\n },\n });\n\n ready = true;\n send(\"READY\");\n\n while (queue.length > 0) {\n const next = queue.shift();\n try {\n await applyMessage(next);\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to process queued message\",\n type: \"notification\",\n endConversation: false,\n });\n }\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to initialize conversation\",\n type: \"modal\",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n try {\n switch (type) {\n case \"START_SESSION\":\n await conversation.startSession();\n break;\n case \"END_SESSION\":\n await conversation.endSession();\n break;\n case \"SEND_MESSAGE\":\n await conversation.sendMessage(payload?.text || \"\");\n break;\n case \"TOGGLE_MIC\":\n await conversation.toggleMicrophone();\n break;\n case \"TOGGLE_MUTE\":\n await conversation.toggleMute();\n break;\n case \"KEEP_SESSION\":\n await conversation.keepSession();\n break;\n default:\n break;\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Operation failed\",\n type: \"notification\",\n endConversation: false,\n });\n }\n };\n\n const handleIncoming = (event) => {\n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== \"INIT\") {\n queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (error) {\n console.error(\"Failed to handle incoming message:\", error);\n }\n };\n\n window.addEventListener(\"message\", handleIncoming);\n document.addEventListener(\"message\", handleIncoming);\n </script>\n </body>\n</html>`;\n}\n\nfunction useBridge(\n options: ConversationOptions,\n events: ConversationEvents = {},\n bridge: BridgeOptions = {}\n): UseConversationResult {\n const webViewRef = useRef<any>(null);\n const [initialized, setInitialized] = useState(false);\n\n const html = useMemo(\n () => buildHtml(bridge.webClientUrl || DEFAULT_WEB_CLIENT_URL),\n [bridge.webClientUrl]\n );\n\n const post = useCallback((type: string, payload?: any) => {\n const message: BridgeMessage = { type, payload };\n webViewRef.current?.postMessage(JSON.stringify(message));\n }, []);\n\n const init = useCallback(() => {\n if (initialized) return;\n console.log(\"Initializing conversation bridge with options:\", options);\n post(\"INIT\", {\n ...options,\n });\n setInitialized(true);\n }, [initialized, options, post]);\n\n const onMessage = useCallback(\n (event: WebViewMessageEvent) => {\n let data: BridgeMessage | null = null;\n try {\n data = JSON.parse(event.nativeEvent.data);\n } catch (_) {\n return;\n }\n if (!data) return;\n\n switch (data.type) {\n case \"STATUS_CHANGE\":\n events.onStatusChange?.(data.payload);\n break;\n case \"CONNECT\":\n events.onConnect?.(data.payload);\n break;\n case \"DISCONNECT\":\n events.onDisconnect?.(data.payload);\n break;\n case \"MESSAGE\":\n events.onMessage?.(data.payload);\n break;\n case \"SUGGESTIONS\":\n events.onSuggestions?.(data.payload);\n break;\n case \"SPEAKING_START\":\n 
events.onSpeakingStart?.();\n break;\n case \"SPEAKING_END\":\n events.onSpeakingEnd?.();\n break;\n case \"TIMEOUT_WARNING\":\n events.onTimeoutWarning?.();\n break;\n case \"TIMEOUT\":\n events.onTimeout?.();\n break;\n case \"KEEP_SESSION\":\n events.onKeepSession?.(data.payload);\n break;\n case \"MUTE_STATUS\":\n events.onMuteStatusChange?.(data.payload);\n break;\n case \"MIC_STATUS\":\n events.onMicrophoneStatusChange?.(data.payload);\n break;\n case \"MIC_TRANSCRIPT\":\n events.onMicrophoneSpeechRecognitionResult?.(data.payload);\n break;\n case \"MIC_ERROR\":\n events.onMicrophoneError?.(data.payload);\n break;\n case \"ERROR\":\n events.onError?.(data.payload);\n break;\n default:\n break;\n }\n },\n [events]\n );\n\n return {\n webViewRef,\n webViewProps: {\n source: { html },\n onMessage,\n onLoadEnd: () => {\n console.log(\"WebView loaded, initializing bridge...\");\n init();\n },\n javaScriptEnabled: true,\n mediaPlaybackRequiresUserAction: false,\n allowsInlineMediaPlayback: true,\n allowsFileAccess: true,\n domStorageEnabled: true,\n mediaCapturePermissionGrantType: 'grant',\n },\n initialized,\n startSession: () => post(\"START_SESSION\"),\n endSession: () => post(\"END_SESSION\"),\n sendMessage: (text: string) => post(\"SEND_MESSAGE\", { text }),\n toggleMicrophone: () => post(\"TOGGLE_MIC\"),\n toggleMute: () => post(\"TOGGLE_MUTE\"),\n keepSession: () => post(\"KEEP_SESSION\"),\n };\n}\n\nexport function useConversation(\n options: ConversationOptions,\n events?: ConversationEvents,\n bridge?: BridgeOptions\n): UseConversationResult {\n return useBridge(options, events, bridge);\n}\n\nexport type UnithConversationViewProps = ConversationEvents & {\n options: ConversationOptions;\n style?: ViewStyle;\n webviewProps?: Record<string, any>;\n webClientUrl?: string;\n};\n\nexport function UnithConversationView({\n options,\n style,\n webviewProps,\n webClientUrl,\n ...events\n}: UnithConversationViewProps) {\n const convo = useConversation(options, events, { webClientUrl });\n\n return (\n <WebView\n ref={convo.webViewRef}\n {...convo.webViewProps}\n {...webviewProps}\n style={style}\n />\n 
);\n}"],"names":["useConversation","options","events","bridge","webViewRef","useRef","_useState","useState","initialized","setInitialized","html","useMemo","webClientUrl","post","useCallback","type","payload","_webViewRef$current","current","postMessage","JSON","stringify","init","console","log","_extends","onMessage","event","data","parse","nativeEvent","_","onStatusChange","onConnect","onDisconnect","onSuggestions","onSpeakingStart","onSpeakingEnd","onTimeoutWarning","onTimeout","onKeepSession","onMuteStatusChange","onMicrophoneStatusChange","onMicrophoneSpeechRecognitionResult","onMicrophoneError","onError","webViewProps","source","onLoadEnd","javaScriptEnabled","mediaPlaybackRequiresUserAction","allowsInlineMediaPlayback","allowsFileAccess","domStorageEnabled","mediaCapturePermissionGrantType","startSession","endSession","sendMessage","text","toggleMicrophone","toggleMute","keepSession","useBridge","UnithConversationView","_ref","style","webviewProps","convo","_objectWithoutPropertiesLoose","_excluded","React","WebView","ref"],"mappings":"sZA8TgBA,EACdC,EACAC,EACAC,GAEA,OAzHF,SACEF,EACAC,EACAC,QADAD,IAAAA,IAAAA,EAA6B,CAAE,YAC/BC,IAAAA,EAAwB,IAExB,IAAMC,EAAaC,EAAY,MAC/BC,EAAsCC,GAAS,GAAxCC,EAAWF,EAAEG,GAAAA,EAAcH,EAAA,GAE5BI,EAAOC,EACX,u6BAAgBR,EAAOS,cA1JzB,8DAgC+D,wwHA0HC,EAC9D,CAACT,EAAOS,eAGJC,EAAOC,EAAY,SAACC,EAAcC,GAAiB,IAAAC,EAEvDA,OAAAA,EAAAb,EAAWc,UAAXD,EAAoBE,YAAYC,KAAKC,UADN,CAAEN,KAAAA,EAAMC,QAAAA,IAEzC,EAAG,IAEGM,EAAOR,EAAY,WACnBN,IACJe,QAAQC,IAAI,iDAAkDvB,GAC9DY,EAAK,OAAMY,EAAA,CAAA,EACNxB,IAELQ,GAAe,GACjB,EAAG,CAACD,EAAaP,EAASY,IAEpBa,EAAYZ,EAChB,SAACa,GACC,IAAIC,EAA6B,KACjC,IACEA,EAAOR,KAAKS,MAAMF,EAAMG,YAAYF,KACtC,CAAE,MAAOG,GACP,MACF,CACA,GAAKH,EAEL,OAAQA,EAAKb,MACX,IAAK,gBACkB,MAArBb,EAAO8B,gBAAP9B,EAAO8B,eAAiBJ,EAAKZ,SAC7B,MACF,IAAK,UACHd,MAAAA,EAAO+B,WAAP/B,EAAO+B,UAAYL,EAAKZ,SACxB,MACF,IAAK,mBACHd,EAAOgC,cAAPhC,EAAOgC,aAAeN,EAAKZ,SAC3B,MACF,IAAK,UACa,MAAhBd,EAAOwB,WAAPxB,EAAOwB,UAAYE,EAAKZ,SACxB,MACF,IAAK,cACHd,MAAAA,EAAOiC,eAAPjC,EAAOiC,cAAgBP,EAAKZ,SAC5B,MACF,IAAK,uBACHd,EAAOkC,iBAAPlC,EAAOkC,kBACP,MACF,IAAK,eACiB,MAApBlC,EAAOmC,eAAPnC,EAAOmC,gBACP,MACF,IAAK,kBACHnC,MAAAA,EAAOoC,kBAAPpC,EAAOoC,mBACP,MACF,IAAK,UACa,MAAhBpC,EAAOqC,WAAPrC,EAAOqC,YACP,MACF,IAAK,eACHrC,MAAAA,EAAOsC,eAAPtC,EAAOsC,cAAgBZ,EAAKZ,SAC5B,MACF,IAAK,oBACHd,EAAOuC,oBAAPvC,EAAOuC,mBAAqBb,EAAKZ,SACjC,MACF,IAAK,aAC4B,MAA/Bd,EAAOwC,0BAAPxC,EAAOwC,yBAA2Bd,EAAKZ,SACvC,MACF,IAAK,iBACHd,MAAAA,EAAOyC,qCAAPzC,EAAOyC,oCAAsCf,EAAKZ,SAClD,MACF,IAAK,kBACHd,EAAO0C,mBAAP1C,EAAO0C,kBAAoBhB,EAAKZ,SAChC,MACF,IAAK,QACW,MAAdd,EAAO2C,SAAP3C,EAAO2C,QAAUjB,EAAKZ,SAK5B,EACA,CAACd,IAGH,MAAO,CACLE,WAAAA,EACA0C,aAAc,CACZC,OAAQ,CAAErC,KAAAA,GACVgB,UAAAA,EACAsB,UAAW,WACTzB,QAAQC,IAAI,0CACZF,GACF,EACA2B,mBAAmB,EACnBC,iCAAiC,EACjCC,2BAA2B,EAC3BC,kBAAkB,EAClBC,mBAAmB,EACnBC,gCAAiC,SAEnC9C,YAAAA,EACA+C,aAAc,WAAF,OAAQ1C,EAAK,gBAAgB,EACzC2C,WAAY,kBAAM3C,EAAK,cAAc,EACrC4C,YAAa,SAACC,GAAY,OAAK7C,EAAK,eAAgB,CAAE6C,KAAAA,GAAO,EAC7DC,iBAAkB,kBAAM9C,EAAK,aAAa,EAC1C+C,WAAY,WAAM,OAAA/C,EAAK,cAAc,EACrCgD,YAAa,WAAF,OAAQhD,EAAK,eAAe,EAE3C,CAOSiD,CAAU7D,EAASC,EAAQC,EACpC,CASgB,SAAA4D,EAAqBC,GACnC,IACAC,EAAKD,EAALC,MACAC,EAAYF,EAAZE,aACAtD,EAAYoD,EAAZpD,aAGMuD,EAAQnE,EANPgE,EAAP/D,mJAISmE,CAAAJ,EAAAK,GAEsC,CAAEzD,aAAAA,iBAEjD,OACE0D,gBAACC,EAAO9C,GACN+C,IAAKL,EAAM/D,YACP+D,EAAMrB,aACNoB,GACJD,MAAOA,IAGb"}
package/dist/lib.umd.js CHANGED
@@ -1,2 +1,2 @@
- !function(n,e){"object"==typeof exports&&"undefined"!=typeof module?e(exports,require("react"),require("react-native-webview")):"function"==typeof define&&define.amd?define(["exports","react","react-native-webview"],e):e((n||self).reactNative={},n.react,n.reactNativeWebview)}(this,function(n,e,t){function a(n){return n&&"object"==typeof n&&"default"in n?n:{default:n}}var o=/*#__PURE__*/a(e);function i(){return i=Object.assign?Object.assign.bind():function(n){for(var e=1;e<arguments.length;e++){var t=arguments[e];for(var a in t)({}).hasOwnProperty.call(t,a)&&(n[a]=t[a])}return n},i.apply(null,arguments)}var s=["options","style","webviewProps","webClientUrl"];function r(n,t,a){return function(n,t,a){void 0===t&&(t={}),void 0===a&&(a={});var o=e.useRef(null),s=e.useState(!1),r=s[0],l=s[1],d=e.useMemo(function(){return'<!doctype html>\n<html>\n <head>\n <meta charset="utf-8" />\n <meta name="viewport" content="width=device-width, initial-scale=1" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id="root"></div>\n <script type="module">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => {\n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === "INIT") {\n try {\n const { Conversation } = await import("'+(a.webClientUrl||"https://unpkg.com/@unith-ai/core-client/dist/lib.module.js")+'");\n\n if (!("VideoDecoder" in window)) {\n send("ERROR", {\n message: "WebCodecs VideoDecoder is not supported on this device.",\n type: "modal",\n endConversation: true,\n });\n return;\n }\n\n const element = document.getElementById("root");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send("STATUS_CHANGE", data),\n onConnect: (data) => send("CONNECT", data),\n onDisconnect: (data) => send("DISCONNECT", data),\n onMessage: (data) => send("MESSAGE", data),\n onSuggestions: (data) => send("SUGGESTIONS", data),\n onSpeakingStart: () => send("SPEAKING_START"),\n onSpeakingEnd: () => send("SPEAKING_END"),\n onTimeoutWarning: () => send("TIMEOUT_WARNING"),\n onTimeout: () => send("TIMEOUT"),\n onKeepSession: (data) => send("KEEP_SESSION", data),\n onMuteStatusChange: (data) => send("MUTE_STATUS", data),\n onError: (data) => send("ERROR", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send("MIC_ERROR", data),\n onMicrophoneStatusChange: (data) => send("MIC_STATUS", data),\n onMicrophoneSpeechRecognitionResult: (data) => send("MIC_TRANSCRIPT", data),\n },\n });\n\n ready = true;\n send("READY");\n\n while (queue.length > 0) {\n const next = queue.shift();\n await applyMessage(next);\n }\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Failed to initialize conversation",\n type: "modal",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n switch (type) {\n case "START_SESSION":\n await conversation.startSession();\n break;\n case "END_SESSION":\n await conversation.endSession();\n break;\n case "SEND_MESSAGE":\n await conversation.sendMessage(payload?.text || "");\n break;\n case "TOGGLE_MIC":\n await conversation.toggleMicrophone();\n break;\n case "TOGGLE_MUTE":\n await 
conversation.toggleMute();\n break;\n case "KEEP_SESSION":\n await conversation.keepSession();\n break;\n default:\n break;\n }\n };\n\n const handleIncoming = (event) => {\n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== "INIT") {\n queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (_) {}\n };\n\n window.addEventListener("message", handleIncoming);\n document.addEventListener("message", handleIncoming);\n <\/script>\n </body>\n</html>'},[a.webClientUrl]),c=e.useCallback(function(n,e){var t;null==(t=o.current)||t.postMessage(JSON.stringify({type:n,payload:e}))},[]),u=e.useCallback(function(){r||(c("INIT",i({},n)),l(!0))},[r,n,c]),S=e.useCallback(function(n){var e=null;try{e=JSON.parse(n.nativeEvent.data)}catch(n){return}if(e)switch(e.type){case"STATUS_CHANGE":null==t.onStatusChange||t.onStatusChange(e.payload);break;case"CONNECT":null==t.onConnect||t.onConnect(e.payload);break;case"DISCONNECT":null==t.onDisconnect||t.onDisconnect(e.payload);break;case"MESSAGE":null==t.onMessage||t.onMessage(e.payload);break;case"SUGGESTIONS":null==t.onSuggestions||t.onSuggestions(e.payload);break;case"SPEAKING_START":null==t.onSpeakingStart||t.onSpeakingStart();break;case"SPEAKING_END":null==t.onSpeakingEnd||t.onSpeakingEnd();break;case"TIMEOUT_WARNING":null==t.onTimeoutWarning||t.onTimeoutWarning();break;case"TIMEOUT":null==t.onTimeout||t.onTimeout();break;case"KEEP_SESSION":null==t.onKeepSession||t.onKeepSession(e.payload);break;case"MUTE_STATUS":null==t.onMuteStatusChange||t.onMuteStatusChange(e.payload);break;case"MIC_STATUS":null==t.onMicrophoneStatusChange||t.onMicrophoneStatusChange(e.payload);break;case"MIC_TRANSCRIPT":null==t.onMicrophoneSpeechRecognitionResult||t.onMicrophoneSpeechRecognitionResult(e.payload);break;case"MIC_ERROR":null==t.onMicrophoneError||t.onMicrophoneError(e.payload);break;case"ERROR":null==t.onError||t.onError(e.payload)}},[t]);return{webViewRef:o,webViewProps:{source:{html:d},onMessage:S,onLoadEnd:u,javaScriptEnabled:!0,mediaPlaybackRequiresUserAction:!0,allowsInlineMediaPlayback:!0},initialized:r,startSession:function(){return c("START_SESSION")},endSession:function(){return c("END_SESSION")},sendMessage:function(n){return c("SEND_MESSAGE",{text:n})},toggleMicrophone:function(){return c("TOGGLE_MIC")},toggleMute:function(){return c("TOGGLE_MUTE")},keepSession:function(){return c("KEEP_SESSION")}}}(n,t,a)}n.UnithConversationView=function(n){var e=n.style,a=n.webviewProps,l=n.webClientUrl,d=r(n.options,function(n,e){if(null==n)return{};var t={};for(var a in n)if({}.hasOwnProperty.call(n,a)){if(-1!==e.indexOf(a))continue;t[a]=n[a]}return t}(n,s),{webClientUrl:l});/*#__PURE__*/return o.default.createElement(t.WebView,i({ref:d.webViewRef},d.webViewProps,a,{style:e}))},n.useConversation=r});
+ !function(n,e){"object"==typeof exports&&"undefined"!=typeof module?e(exports,require("react"),require("react-native-webview")):"function"==typeof define&&define.amd?define(["exports","react","react-native-webview"],e):e((n||self).reactNative={},n.react,n.reactNativeWebview)}(this,function(n,e,a){function t(n){return n&&"object"==typeof n&&"default"in n?n:{default:n}}var o=/*#__PURE__*/t(e);function i(){return i=Object.assign?Object.assign.bind():function(n){for(var e=1;e<arguments.length;e++){var a=arguments[e];for(var t in a)({}).hasOwnProperty.call(a,t)&&(n[t]=a[t])}return n},i.apply(null,arguments)}var r=["options","style","webviewProps","webClientUrl"];function s(n,a,t){return function(n,a,t){void 0===a&&(a={}),void 0===t&&(t={});var o=e.useRef(null),r=e.useState(!1),s=r[0],l=r[1],d=e.useMemo(function(){return'<!doctype html>\n<html>\n <head>\n <meta charset="utf-8" />\n <meta name="viewport" content="width=device-width, initial-scale=1" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id="root"></div>\n <script type="module">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => {\n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === "INIT") {\n try {\n const { Conversation } = await import("'+(t.webClientUrl||"https://unpkg.com/@unith-ai/core-client/dist/lib.module.js")+'");\n\n if (!("VideoDecoder" in window)) {\n send("ERROR", {\n message: "WebCodecs VideoDecoder is not supported on this device.",\n type: "modal",\n endConversation: true,\n });\n return;\n }\n\n const element = document.getElementById("root");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send("STATUS_CHANGE", data),\n onConnect: (data) => send("CONNECT", data),\n onDisconnect: (data) => send("DISCONNECT", data),\n onMessage: (data) => send("MESSAGE", data),\n onSuggestions: (data) => send("SUGGESTIONS", data),\n onSpeakingStart: () => send("SPEAKING_START"),\n onSpeakingEnd: () => send("SPEAKING_END"),\n onTimeoutWarning: () => send("TIMEOUT_WARNING"),\n onTimeout: () => send("TIMEOUT"),\n onKeepSession: (data) => send("KEEP_SESSION", data),\n onMuteStatusChange: (data) => send("MUTE_STATUS", data),\n onError: (data) => send("ERROR", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send("MIC_ERROR", data),\n onMicrophoneStatusChange: (data) => send("MIC_STATUS", data),\n onMicrophoneSpeechRecognitionResult: (data) => send("MIC_TRANSCRIPT", data),\n },\n });\n\n ready = true;\n send("READY");\n\n while (queue.length > 0) {\n const next = queue.shift();\n try {\n await applyMessage(next);\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Failed to process queued message",\n type: "notification",\n endConversation: false,\n });\n }\n }\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Failed to initialize conversation",\n type: "modal",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n try {\n switch (type) {\n case "START_SESSION":\n await conversation.startSession();\n break;\n case "END_SESSION":\n await conversation.endSession();\n break;\n case "SEND_MESSAGE":\n 
await conversation.sendMessage(payload?.text || "");\n break;\n case "TOGGLE_MIC":\n await conversation.toggleMicrophone();\n break;\n case "TOGGLE_MUTE":\n await conversation.toggleMute();\n break;\n case "KEEP_SESSION":\n await conversation.keepSession();\n break;\n default:\n break;\n }\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Operation failed",\n type: "notification",\n endConversation: false,\n });\n }\n };\n\n const handleIncoming = (event) => {\n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== "INIT") {\n queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (error) {\n console.error("Failed to handle incoming message:", error);\n }\n };\n\n window.addEventListener("message", handleIncoming);\n document.addEventListener("message", handleIncoming);\n <\/script>\n </body>\n</html>'},[t.webClientUrl]),c=e.useCallback(function(n,e){var a;null==(a=o.current)||a.postMessage(JSON.stringify({type:n,payload:e}))},[]),u=e.useCallback(function(){s||(console.log("Initializing conversation bridge with options:",n),c("INIT",i({},n)),l(!0))},[s,n,c]),p=e.useCallback(function(n){var e=null;try{e=JSON.parse(n.nativeEvent.data)}catch(n){return}if(e)switch(e.type){case"STATUS_CHANGE":null==a.onStatusChange||a.onStatusChange(e.payload);break;case"CONNECT":null==a.onConnect||a.onConnect(e.payload);break;case"DISCONNECT":null==a.onDisconnect||a.onDisconnect(e.payload);break;case"MESSAGE":null==a.onMessage||a.onMessage(e.payload);break;case"SUGGESTIONS":null==a.onSuggestions||a.onSuggestions(e.payload);break;case"SPEAKING_START":null==a.onSpeakingStart||a.onSpeakingStart();break;case"SPEAKING_END":null==a.onSpeakingEnd||a.onSpeakingEnd();break;case"TIMEOUT_WARNING":null==a.onTimeoutWarning||a.onTimeoutWarning();break;case"TIMEOUT":null==a.onTimeout||a.onTimeout();break;case"KEEP_SESSION":null==a.onKeepSession||a.onKeepSession(e.payload);break;case"MUTE_STATUS":null==a.onMuteStatusChange||a.onMuteStatusChange(e.payload);break;case"MIC_STATUS":null==a.onMicrophoneStatusChange||a.onMicrophoneStatusChange(e.payload);break;case"MIC_TRANSCRIPT":null==a.onMicrophoneSpeechRecognitionResult||a.onMicrophoneSpeechRecognitionResult(e.payload);break;case"MIC_ERROR":null==a.onMicrophoneError||a.onMicrophoneError(e.payload);break;case"ERROR":null==a.onError||a.onError(e.payload)}},[a]);return{webViewRef:o,webViewProps:{source:{html:d},onMessage:p,onLoadEnd:function(){console.log("WebView loaded, initializing bridge..."),u()},javaScriptEnabled:!0,mediaPlaybackRequiresUserAction:!1,allowsInlineMediaPlayback:!0,allowsFileAccess:!0,domStorageEnabled:!0,mediaCapturePermissionGrantType:"grant"},initialized:s,startSession:function(){return c("START_SESSION")},endSession:function(){return c("END_SESSION")},sendMessage:function(n){return c("SEND_MESSAGE",{text:n})},toggleMicrophone:function(){return c("TOGGLE_MIC")},toggleMute:function(){return c("TOGGLE_MUTE")},keepSession:function(){return c("KEEP_SESSION")}}}(n,a,t)}n.UnithConversationView=function(n){var e=n.style,t=n.webviewProps,l=n.webClientUrl,d=s(n.options,function(n,e){if(null==n)return{};var a={};for(var t in n)if({}.hasOwnProperty.call(n,t)){if(-1!==e.indexOf(t))continue;a[t]=n[t]}return a}(n,r),{webClientUrl:l});/*#__PURE__*/return o.default.createElement(a.WebView,i({ref:d.webViewRef},d.webViewProps,t,{style:e}))},n.useConversation=s});
  //# sourceMappingURL=lib.umd.js.map
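Note: the UMD bundle picks up the same bridge changes, and the minified diff also shows the expanded WebView configuration: 0.0.1 returned only `source`, `onMessage`, `onLoadEnd`, `javaScriptEnabled`, `mediaPlaybackRequiresUserAction: true`, and `allowsInlineMediaPlayback`, while 0.0.3 enables autoplay and adds file access, DOM storage, and automatic media-capture grants. Reconstructed from the embedded sources (`buildWebViewProps` is a hypothetical helper; the hook builds this object inline from its internal `html`/`onMessage`/`init` values):

```ts
import type { WebViewProps } from "react-native-webview";

// Hypothetical helper mirroring the webViewProps object returned by the hook.
function buildWebViewProps(
  html: string,
  onMessage: WebViewProps["onMessage"],
  init: () => void
): Partial<WebViewProps> {
  return {
    source: { html },
    onMessage,
    onLoadEnd: () => {
      console.log("WebView loaded, initializing bridge...");
      init();
    },
    javaScriptEnabled: true,
    mediaPlaybackRequiresUserAction: false,   // 0.0.1 shipped `true`, blocking autoplay
    allowsInlineMediaPlayback: true,
    allowsFileAccess: true,                   // new in 0.0.3 (Android)
    domStorageEnabled: true,                  // new in 0.0.3 (Android)
    mediaCapturePermissionGrantType: "grant", // new in 0.0.3: auto-grant mic/camera capture
  };
}
```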
package/dist/lib.umd.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"lib.umd.js","sources":["../src/index.tsx"],"sourcesContent":["import React, { useCallback, useMemo, useRef, useState } from \"react\";\nimport { ViewStyle } from \"react-native\";\nimport { WebView, WebViewMessageEvent } from \"react-native-webview\";\n\nexport type ConversationOptions = {\n orgId: string;\n headId: string;\n apiKey: string;\n environment?: string; \n mode?: string;\n language?: string;\n username?: string;\n allowWakeLock?: boolean;\n fadeTransitionsType?: string;\n microphoneProvider?: \"azure\" | \"eleven_labs\" | \"custom\";\n};\n\nexport type ConversationEvents = {\n onStatusChange?: (prop: { status: string }) => void;\n onConnect?: (prop: { userId: string; headInfo: any; microphoneAccess: boolean }) => void;\n onDisconnect?: (prop: any) => void;\n onMessage?: (prop: any) => void;\n onSuggestions?: (prop: { suggestions: string[] }) => void;\n onTimeoutWarning?: () => void;\n onTimeout?: () => void;\n onMuteStatusChange?: (prop: { isMuted: boolean }) => void;\n onSpeakingStart?: () => void;\n onSpeakingEnd?: () => void;\n onKeepSession?: (prop: { granted: boolean }) => void;\n onError?: (prop: { message: string; endConversation: boolean; type: string }) => void;\n onMicrophoneError?: (prop: { message: string }) => void;\n onMicrophoneStatusChange?: (prop: { status: \"ON\" | \"OFF\" | \"PROCESSING\" }) => void;\n onMicrophoneSpeechRecognitionResult?: (prop: { transcript: string }) => void;\n};\n\nexport type BridgeOptions = {\n webClientUrl?: string;\n};\n\nexport type UseConversationResult = {\n webViewRef: React.RefObject<any>;\n webViewProps: Record<string, any>;\n initialized: boolean;\n startSession: () => void;\n endSession: () => void;\n sendMessage: (text: string) => void;\n toggleMicrophone: () => void;\n toggleMute: () => void;\n keepSession: () => void;\n};\n\ntype BridgeMessage = {\n type: string;\n payload?: any;\n};\n\nconst DEFAULT_WEB_CLIENT_URL =\n \"https://unpkg.com/@unith-ai/core-client/dist/lib.module.js\";\n\nfunction buildHtml(webClientUrl: string) {\n return `<!doctype html>\n<html>\n <head>\n <meta charset=\"utf-8\" />\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id=\"root\"></div>\n <script type=\"module\">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => {\n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === \"INIT\") {\n try {\n const { Conversation } = await import(\"${webClientUrl}\");\n\n if (!(\"VideoDecoder\" in window)) {\n send(\"ERROR\", {\n message: \"WebCodecs VideoDecoder is not supported on this device.\",\n type: \"modal\",\n endConversation: true,\n });\n return;\n }\n\n const element = document.getElementById(\"root\");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send(\"STATUS_CHANGE\", data),\n onConnect: (data) => send(\"CONNECT\", data),\n onDisconnect: (data) => send(\"DISCONNECT\", data),\n onMessage: (data) => send(\"MESSAGE\", data),\n onSuggestions: (data) => send(\"SUGGESTIONS\", data),\n onSpeakingStart: () => send(\"SPEAKING_START\"),\n onSpeakingEnd: () => 
send(\"SPEAKING_END\"),\n onTimeoutWarning: () => send(\"TIMEOUT_WARNING\"),\n onTimeout: () => send(\"TIMEOUT\"),\n onKeepSession: (data) => send(\"KEEP_SESSION\", data),\n onMuteStatusChange: (data) => send(\"MUTE_STATUS\", data),\n onError: (data) => send(\"ERROR\", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send(\"MIC_ERROR\", data),\n onMicrophoneStatusChange: (data) => send(\"MIC_STATUS\", data),\n onMicrophoneSpeechRecognitionResult: (data) => send(\"MIC_TRANSCRIPT\", data),\n },\n });\n\n ready = true;\n send(\"READY\");\n\n while (queue.length > 0) {\n const next = queue.shift();\n await applyMessage(next);\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to initialize conversation\",\n type: \"modal\",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n switch (type) {\n case \"START_SESSION\":\n await conversation.startSession();\n break;\n case \"END_SESSION\":\n await conversation.endSession();\n break;\n case \"SEND_MESSAGE\":\n await conversation.sendMessage(payload?.text || \"\");\n break;\n case \"TOGGLE_MIC\":\n await conversation.toggleMicrophone();\n break;\n case \"TOGGLE_MUTE\":\n await conversation.toggleMute();\n break;\n case \"KEEP_SESSION\":\n await conversation.keepSession();\n break;\n default:\n break;\n }\n };\n\n const handleIncoming = (event) => {\n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== \"INIT\") {\n queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (_) {}\n };\n\n window.addEventListener(\"message\", handleIncoming);\n document.addEventListener(\"message\", handleIncoming);\n </script>\n </body>\n</html>`;\n}\n\nfunction useBridge(\n options: ConversationOptions,\n events: ConversationEvents = {},\n bridge: BridgeOptions = {}\n): UseConversationResult {\n const webViewRef = useRef<any>(null);\n const [initialized, setInitialized] = useState(false);\n\n const html = useMemo(\n () => buildHtml(bridge.webClientUrl || DEFAULT_WEB_CLIENT_URL),\n [bridge.webClientUrl]\n );\n\n const post = useCallback((type: string, payload?: any) => {\n const message: BridgeMessage = { type, payload };\n webViewRef.current?.postMessage(JSON.stringify(message));\n }, []);\n\n const init = useCallback(() => {\n if (initialized) return;\n post(\"INIT\", {\n ...options,\n });\n setInitialized(true);\n }, [initialized, options, post]);\n\n const onMessage = useCallback(\n (event: WebViewMessageEvent) => {\n let data: BridgeMessage | null = null;\n try {\n data = JSON.parse(event.nativeEvent.data);\n } catch (_) {\n return;\n }\n if (!data) return;\n\n switch (data.type) {\n case \"STATUS_CHANGE\":\n events.onStatusChange?.(data.payload);\n break;\n case \"CONNECT\":\n events.onConnect?.(data.payload);\n break;\n case \"DISCONNECT\":\n events.onDisconnect?.(data.payload);\n break;\n case \"MESSAGE\":\n events.onMessage?.(data.payload);\n break;\n case \"SUGGESTIONS\":\n events.onSuggestions?.(data.payload);\n break;\n case \"SPEAKING_START\":\n events.onSpeakingStart?.();\n break;\n case \"SPEAKING_END\":\n events.onSpeakingEnd?.();\n break;\n case \"TIMEOUT_WARNING\":\n events.onTimeoutWarning?.();\n break;\n case \"TIMEOUT\":\n events.onTimeout?.();\n break;\n case \"KEEP_SESSION\":\n events.onKeepSession?.(data.payload);\n break;\n case \"MUTE_STATUS\":\n events.onMuteStatusChange?.(data.payload);\n break;\n case \"MIC_STATUS\":\n events.onMicrophoneStatusChange?.(data.payload);\n break;\n case \"MIC_TRANSCRIPT\":\n 
events.onMicrophoneSpeechRecognitionResult?.(data.payload);\n break;\n case \"MIC_ERROR\":\n events.onMicrophoneError?.(data.payload);\n break;\n case \"ERROR\":\n events.onError?.(data.payload);\n break;\n default:\n break;\n }\n },\n [events]\n );\n\n return {\n webViewRef,\n webViewProps: {\n source: { html },\n onMessage,\n onLoadEnd: init,\n javaScriptEnabled: true,\n mediaPlaybackRequiresUserAction: true,\n allowsInlineMediaPlayback: true,\n },\n initialized,\n startSession: () => post(\"START_SESSION\"),\n endSession: () => post(\"END_SESSION\"),\n sendMessage: (text: string) => post(\"SEND_MESSAGE\", { text }),\n toggleMicrophone: () => post(\"TOGGLE_MIC\"),\n toggleMute: () => post(\"TOGGLE_MUTE\"),\n keepSession: () => post(\"KEEP_SESSION\"),\n };\n}\n\nexport function useConversation(\n options: ConversationOptions,\n events?: ConversationEvents,\n bridge?: BridgeOptions\n): UseConversationResult {\n return useBridge(options, events, bridge);\n}\n\nexport type UnithConversationViewProps = ConversationEvents & {\n options: ConversationOptions;\n style?: ViewStyle;\n webviewProps?: Record<string, any>;\n webClientUrl?: string;\n};\n\nexport function UnithConversationView({\n options,\n style,\n webviewProps,\n webClientUrl,\n ...events\n}: UnithConversationViewProps) {\n const convo = useConversation(options, events, { webClientUrl });\n\n return (\n <WebView\n ref={convo.webViewRef}\n {...convo.webViewProps}\n {...webviewProps}\n style={style}\n />\n );\n}\n"],"names":["_excluded","useConversation","options","events","bridge","webViewRef","useRef","_useState","useState","initialized","setInitialized","html","useMemo","webClientUrl","post","useCallback","type","payload","_webViewRef$current","current","postMessage","JSON","stringify","init","_extends","onMessage","event","data","parse","nativeEvent","_","onStatusChange","onConnect","onDisconnect","onSuggestions","onSpeakingStart","onSpeakingEnd","onTimeoutWarning","onTimeout","onKeepSession","onMuteStatusChange","onMicrophoneStatusChange","onMicrophoneSpeechRecognitionResult","onMicrophoneError","onError","webViewProps","source","onLoadEnd","javaScriptEnabled","mediaPlaybackRequiresUserAction","allowsInlineMediaPlayback","startSession","endSession","sendMessage","text","toggleMicrophone","toggleMute","keepSession","useBridge","_ref","style","webviewProps","convo","_objectWithoutPropertiesLoose","React","WebView","ref"],"mappings":"8oBAAA,IAAAA,EAAA,CAAA,UAAA,QAAA,eAAA,yBAqSgBC,EACdC,EACAC,EACAC,GAEA,OAlHF,SACEF,EACAC,EACAC,QADAD,IAAAA,IAAAA,EAA6B,CAAE,YAC/BC,IAAAA,EAAwB,IAExB,IAAMC,EAAaC,EAAAA,OAAY,MAC/BC,EAAsCC,EAAQA,UAAC,GAAxCC,EAAWF,EAAA,GAAEG,EAAcH,KAE5BI,EAAOC,EAAAA,QACX,WAAA,MArIF,s5BAqIkBR,EAAOS,cAxIzB,8DA6HF,+nGAWkE,EAC9D,CAACT,EAAOS,eAGJC,EAAOC,cAAY,SAACC,EAAcC,GAAiB,IAAAC,EAErC,OAAlBA,EAAAb,EAAWc,UAAXD,EAAoBE,YAAYC,KAAKC,UADN,CAAEN,KAAAA,EAAMC,QAAAA,IAEzC,EAAG,IAEGM,EAAOR,EAAWA,YAAC,WACnBN,IACJK,EAAK,OAAMU,EACNtB,GAAAA,IAELQ,GAAe,GACjB,EAAG,CAACD,EAAaP,EAASY,IAEpBW,EAAYV,cAChB,SAACW,GACC,IAAIC,EAA6B,KACjC,IACEA,EAAON,KAAKO,MAAMF,EAAMG,YAAYF,KACtC,CAAE,MAAOG,GACP,MACF,CACA,GAAKH,EAEL,OAAQA,EAAKX,MACX,IAAK,gBACkB,MAArBb,EAAO4B,gBAAP5B,EAAO4B,eAAiBJ,EAAKV,SAC7B,MACF,IAAK,UACa,MAAhBd,EAAO6B,WAAP7B,EAAO6B,UAAYL,EAAKV,SACxB,MACF,IAAK,aACgB,MAAnBd,EAAO8B,cAAP9B,EAAO8B,aAAeN,EAAKV,SAC3B,MACF,IAAK,UACa,MAAhBd,EAAOsB,WAAPtB,EAAOsB,UAAYE,EAAKV,SACxB,MACF,IAAK,cACHd,MAAAA,EAAO+B,eAAP/B,EAAO+B,cAAgBP,EAAKV,SAC5B,MACF,IAAK,iBACHd,MAAAA,EAAOgC,iBAAPhC,EAAOgC,kBACP,MACF,IAAK,eACHhC,MAAAA,EAAOiC,eAAPjC,EAAOiC,gBACP,MACF,IAAK,wBACHjC,EAAOk
C,kBAAPlC,EAAOkC,mBACP,MACF,IAAK,gBACHlC,EAAOmC,WAAPnC,EAAOmC,YACP,MACF,IAAK,qBACHnC,EAAOoC,eAAPpC,EAAOoC,cAAgBZ,EAAKV,SAC5B,MACF,IAAK,cACsB,MAAzBd,EAAOqC,oBAAPrC,EAAOqC,mBAAqBb,EAAKV,SACjC,MACF,IAAK,aAC4B,MAA/Bd,EAAOsC,0BAAPtC,EAAOsC,yBAA2Bd,EAAKV,SACvC,MACF,IAAK,iBACuC,MAA1Cd,EAAOuC,qCAAPvC,EAAOuC,oCAAsCf,EAAKV,SAClD,MACF,IAAK,YACqB,MAAxBd,EAAOwC,mBAAPxC,EAAOwC,kBAAoBhB,EAAKV,SAChC,MACF,IAAK,QACHd,MAAAA,EAAOyC,SAAPzC,EAAOyC,QAAUjB,EAAKV,SAK5B,EACA,CAACd,IAGH,MAAO,CACLE,WAAAA,EACAwC,aAAc,CACZC,OAAQ,CAAEnC,KAAAA,GACVc,UAAAA,EACAsB,UAAWxB,EACXyB,mBAAmB,EACnBC,iCAAiC,EACjCC,2BAA2B,GAE7BzC,YAAAA,EACA0C,aAAc,WAAM,OAAArC,EAAK,gBAAgB,EACzCsC,WAAY,kBAAMtC,EAAK,cAAc,EACrCuC,YAAa,SAACC,GAAY,OAAKxC,EAAK,eAAgB,CAAEwC,KAAAA,GAAO,EAC7DC,iBAAkB,WAAM,OAAAzC,EAAK,aAAa,EAC1C0C,WAAY,kBAAM1C,EAAK,cAAc,EACrC2C,YAAa,WAAM,OAAA3C,EAAK,eAAe,EAE3C,CAOS4C,CAAUxD,EAASC,EAAQC,EACpC,kCASqCuD,OAEnCC,EAAKD,EAALC,MACAC,EAAYF,EAAZE,aACAhD,EAAY8C,EAAZ9C,aAGMiD,EAAQ7D,EANP0D,EAAPzD,mJAIS6D,CAAAJ,EAAA3D,GAEsC,CAAEa,aAAAA,iBAEjD,OACEmD,wBAACC,EAAOA,QAAAzC,EAAA,CACN0C,IAAKJ,EAAMzD,YACPyD,EAAMjB,aACNgB,EAAY,CAChBD,MAAOA,IAGb"}
+ {"version":3,"file":"lib.umd.js","sources":["../src/index.tsx"],"sourcesContent":["import React, { useCallback, useMemo, useRef, useState } from \"react\";\nimport { ViewStyle } from \"react-native\";\nimport { WebView, WebViewMessageEvent } from \"react-native-webview\";\n\nexport type ConversationOptions = {\n orgId: string;\n headId: string;\n apiKey: string;\n environment?: string;\n mode?: string;\n language?: string;\n username?: string;\n allowWakeLock?: boolean;\n fadeTransitionsType?: string;\n microphoneProvider?: \"azure\" | \"eleven_labs\" | \"custom\";\n};\n\nexport type ConversationEvents = {\n onStatusChange?: (prop: { status: string }) => void;\n onConnect?: (prop: { userId: string; headInfo: any; microphoneAccess: boolean }) => void;\n onDisconnect?: (prop: any) => void;\n onMessage?: (prop: any) => void;\n onSuggestions?: (prop: { suggestions: string[] }) => void;\n onTimeoutWarning?: () => void;\n onTimeout?: () => void;\n onMuteStatusChange?: (prop: { isMuted: boolean }) => void;\n onSpeakingStart?: () => void;\n onSpeakingEnd?: () => void;\n onKeepSession?: (prop: { granted: boolean }) => void;\n onError?: (prop: { message: string; endConversation: boolean; type: string }) => void;\n onMicrophoneError?: (prop: { message: string }) => void;\n onMicrophoneStatusChange?: (prop: { status: \"ON\" | \"OFF\" | \"PROCESSING\" }) => void;\n onMicrophoneSpeechRecognitionResult?: (prop: { transcript: string }) => void;\n};\n\nexport type BridgeOptions = {\n webClientUrl?: string;\n};\n\nexport type UseConversationResult = {\n webViewRef: React.RefObject<any>;\n webViewProps: Record<string, any>;\n initialized: boolean;\n startSession: () => void;\n endSession: () => void;\n sendMessage: (text: string) => void;\n toggleMicrophone: () => void;\n toggleMute: () => void;\n keepSession: () => void;\n};\n\ntype BridgeMessage = {\n type: string;\n payload?: any;\n};\n\nconst DEFAULT_WEB_CLIENT_URL =\n \"https://unpkg.com/@unith-ai/core-client/dist/lib.module.js\";\n\nfunction buildHtml(webClientUrl: string) {\n return `<!doctype html>\n<html>\n <head>\n <meta charset=\"utf-8\" />\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id=\"root\"></div>\n <script type=\"module\">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => {\n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === \"INIT\") {\n try {\n const { Conversation } = await import(\"${webClientUrl}\");\n\n if (!(\"VideoDecoder\" in window)) {\n send(\"ERROR\", {\n message: \"WebCodecs VideoDecoder is not supported on this device.\",\n type: \"modal\",\n endConversation: true,\n });\n return;\n }\n\n const element = document.getElementById(\"root\");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send(\"STATUS_CHANGE\", data),\n onConnect: (data) => send(\"CONNECT\", data),\n onDisconnect: (data) => send(\"DISCONNECT\", data),\n onMessage: (data) => send(\"MESSAGE\", data),\n onSuggestions: (data) => send(\"SUGGESTIONS\", data),\n onSpeakingStart: () => send(\"SPEAKING_START\"),\n onSpeakingEnd: () => 
send(\"SPEAKING_END\"),\n onTimeoutWarning: () => send(\"TIMEOUT_WARNING\"),\n onTimeout: () => send(\"TIMEOUT\"),\n onKeepSession: (data) => send(\"KEEP_SESSION\", data),\n onMuteStatusChange: (data) => send(\"MUTE_STATUS\", data),\n onError: (data) => send(\"ERROR\", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send(\"MIC_ERROR\", data),\n onMicrophoneStatusChange: (data) => send(\"MIC_STATUS\", data),\n onMicrophoneSpeechRecognitionResult: (data) => send(\"MIC_TRANSCRIPT\", data),\n },\n });\n\n ready = true;\n send(\"READY\");\n\n while (queue.length > 0) {\n const next = queue.shift();\n try {\n await applyMessage(next);\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to process queued message\",\n type: \"notification\",\n endConversation: false,\n });\n }\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to initialize conversation\",\n type: \"modal\",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n try {\n switch (type) {\n case \"START_SESSION\":\n await conversation.startSession();\n break;\n case \"END_SESSION\":\n await conversation.endSession();\n break;\n case \"SEND_MESSAGE\":\n await conversation.sendMessage(payload?.text || \"\");\n break;\n case \"TOGGLE_MIC\":\n await conversation.toggleMicrophone();\n break;\n case \"TOGGLE_MUTE\":\n await conversation.toggleMute();\n break;\n case \"KEEP_SESSION\":\n await conversation.keepSession();\n break;\n default:\n break;\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Operation failed\",\n type: \"notification\",\n endConversation: false,\n });\n }\n };\n\n const handleIncoming = (event) => {\n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== \"INIT\") {\n queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (error) {\n console.error(\"Failed to handle incoming message:\", error);\n }\n };\n\n window.addEventListener(\"message\", handleIncoming);\n document.addEventListener(\"message\", handleIncoming);\n </script>\n </body>\n</html>`;\n}\n\nfunction useBridge(\n options: ConversationOptions,\n events: ConversationEvents = {},\n bridge: BridgeOptions = {}\n): UseConversationResult {\n const webViewRef = useRef<any>(null);\n const [initialized, setInitialized] = useState(false);\n\n const html = useMemo(\n () => buildHtml(bridge.webClientUrl || DEFAULT_WEB_CLIENT_URL),\n [bridge.webClientUrl]\n );\n\n const post = useCallback((type: string, payload?: any) => {\n const message: BridgeMessage = { type, payload };\n webViewRef.current?.postMessage(JSON.stringify(message));\n }, []);\n\n const init = useCallback(() => {\n if (initialized) return;\n console.log(\"Initializing conversation bridge with options:\", options);\n post(\"INIT\", {\n ...options,\n });\n setInitialized(true);\n }, [initialized, options, post]);\n\n const onMessage = useCallback(\n (event: WebViewMessageEvent) => {\n let data: BridgeMessage | null = null;\n try {\n data = JSON.parse(event.nativeEvent.data);\n } catch (_) {\n return;\n }\n if (!data) return;\n\n switch (data.type) {\n case \"STATUS_CHANGE\":\n events.onStatusChange?.(data.payload);\n break;\n case \"CONNECT\":\n events.onConnect?.(data.payload);\n break;\n case \"DISCONNECT\":\n events.onDisconnect?.(data.payload);\n break;\n case \"MESSAGE\":\n events.onMessage?.(data.payload);\n break;\n case \"SUGGESTIONS\":\n events.onSuggestions?.(data.payload);\n break;\n case \"SPEAKING_START\":\n 
events.onSpeakingStart?.();\n break;\n case \"SPEAKING_END\":\n events.onSpeakingEnd?.();\n break;\n case \"TIMEOUT_WARNING\":\n events.onTimeoutWarning?.();\n break;\n case \"TIMEOUT\":\n events.onTimeout?.();\n break;\n case \"KEEP_SESSION\":\n events.onKeepSession?.(data.payload);\n break;\n case \"MUTE_STATUS\":\n events.onMuteStatusChange?.(data.payload);\n break;\n case \"MIC_STATUS\":\n events.onMicrophoneStatusChange?.(data.payload);\n break;\n case \"MIC_TRANSCRIPT\":\n events.onMicrophoneSpeechRecognitionResult?.(data.payload);\n break;\n case \"MIC_ERROR\":\n events.onMicrophoneError?.(data.payload);\n break;\n case \"ERROR\":\n events.onError?.(data.payload);\n break;\n default:\n break;\n }\n },\n [events]\n );\n\n return {\n webViewRef,\n webViewProps: {\n source: { html },\n onMessage,\n onLoadEnd: () => {\n console.log(\"WebView loaded, initializing bridge...\");\n init();\n },\n javaScriptEnabled: true,\n mediaPlaybackRequiresUserAction: false,\n allowsInlineMediaPlayback: true,\n allowsFileAccess: true,\n domStorageEnabled: true,\n mediaCapturePermissionGrantType: 'grant',\n },\n initialized,\n startSession: () => post(\"START_SESSION\"),\n endSession: () => post(\"END_SESSION\"),\n sendMessage: (text: string) => post(\"SEND_MESSAGE\", { text }),\n toggleMicrophone: () => post(\"TOGGLE_MIC\"),\n toggleMute: () => post(\"TOGGLE_MUTE\"),\n keepSession: () => post(\"KEEP_SESSION\"),\n };\n}\n\nexport function useConversation(\n options: ConversationOptions,\n events?: ConversationEvents,\n bridge?: BridgeOptions\n): UseConversationResult {\n return useBridge(options, events, bridge);\n}\n\nexport type UnithConversationViewProps = ConversationEvents & {\n options: ConversationOptions;\n style?: ViewStyle;\n webviewProps?: Record<string, any>;\n webClientUrl?: string;\n};\n\nexport function UnithConversationView({\n options,\n style,\n webviewProps,\n webClientUrl,\n ...events\n}: UnithConversationViewProps) {\n const convo = useConversation(options, events, { webClientUrl });\n\n return (\n <WebView\n ref={convo.webViewRef}\n {...convo.webViewProps}\n {...webviewProps}\n style={style}\n />\n 
);\n}"],"names":["useConversation","options","events","bridge","webViewRef","useRef","_useState","useState","initialized","setInitialized","html","useMemo","webClientUrl","post","useCallback","type","payload","_webViewRef$current","current","postMessage","JSON","stringify","init","console","log","_extends","onMessage","event","data","parse","nativeEvent","_","onStatusChange","onConnect","onDisconnect","onSuggestions","onSpeakingStart","onSpeakingEnd","onTimeoutWarning","onTimeout","onKeepSession","onMuteStatusChange","onMicrophoneStatusChange","onMicrophoneSpeechRecognitionResult","onMicrophoneError","onError","webViewProps","source","onLoadEnd","javaScriptEnabled","mediaPlaybackRequiresUserAction","allowsInlineMediaPlayback","allowsFileAccess","domStorageEnabled","mediaCapturePermissionGrantType","startSession","endSession","sendMessage","text","toggleMicrophone","toggleMute","keepSession","useBridge","_ref","style","webviewProps","convo","_objectWithoutPropertiesLoose","_excluded","React","WebView","ref"],"mappings":"+sBA8TgBA,EACdC,EACAC,EACAC,GAEA,OAzHF,SACEF,EACAC,EACAC,QADAD,IAAAA,IAAAA,EAA6B,CAAE,YAC/BC,IAAAA,EAAwB,IAExB,IAAMC,EAAaC,EAAAA,OAAY,MAC/BC,EAAsCC,YAAS,GAAxCC,EAAWF,EAAEG,GAAAA,EAAcH,EAAA,GAE5BI,EAAOC,EAAAA,QACX,u6BAAgBR,EAAOS,cA1JzB,8DAgC+D,wwHA0HC,EAC9D,CAACT,EAAOS,eAGJC,EAAOC,EAAAA,YAAY,SAACC,EAAcC,GAAiB,IAAAC,EAEvDA,OAAAA,EAAAb,EAAWc,UAAXD,EAAoBE,YAAYC,KAAKC,UADN,CAAEN,KAAAA,EAAMC,QAAAA,IAEzC,EAAG,IAEGM,EAAOR,EAAAA,YAAY,WACnBN,IACJe,QAAQC,IAAI,iDAAkDvB,GAC9DY,EAAK,OAAMY,EAAA,CAAA,EACNxB,IAELQ,GAAe,GACjB,EAAG,CAACD,EAAaP,EAASY,IAEpBa,EAAYZ,EAAWA,YAC3B,SAACa,GACC,IAAIC,EAA6B,KACjC,IACEA,EAAOR,KAAKS,MAAMF,EAAMG,YAAYF,KACtC,CAAE,MAAOG,GACP,MACF,CACA,GAAKH,EAEL,OAAQA,EAAKb,MACX,IAAK,gBACkB,MAArBb,EAAO8B,gBAAP9B,EAAO8B,eAAiBJ,EAAKZ,SAC7B,MACF,IAAK,UACHd,MAAAA,EAAO+B,WAAP/B,EAAO+B,UAAYL,EAAKZ,SACxB,MACF,IAAK,mBACHd,EAAOgC,cAAPhC,EAAOgC,aAAeN,EAAKZ,SAC3B,MACF,IAAK,UACa,MAAhBd,EAAOwB,WAAPxB,EAAOwB,UAAYE,EAAKZ,SACxB,MACF,IAAK,cACHd,MAAAA,EAAOiC,eAAPjC,EAAOiC,cAAgBP,EAAKZ,SAC5B,MACF,IAAK,uBACHd,EAAOkC,iBAAPlC,EAAOkC,kBACP,MACF,IAAK,eACiB,MAApBlC,EAAOmC,eAAPnC,EAAOmC,gBACP,MACF,IAAK,kBACHnC,MAAAA,EAAOoC,kBAAPpC,EAAOoC,mBACP,MACF,IAAK,UACa,MAAhBpC,EAAOqC,WAAPrC,EAAOqC,YACP,MACF,IAAK,eACHrC,MAAAA,EAAOsC,eAAPtC,EAAOsC,cAAgBZ,EAAKZ,SAC5B,MACF,IAAK,oBACHd,EAAOuC,oBAAPvC,EAAOuC,mBAAqBb,EAAKZ,SACjC,MACF,IAAK,aAC4B,MAA/Bd,EAAOwC,0BAAPxC,EAAOwC,yBAA2Bd,EAAKZ,SACvC,MACF,IAAK,iBACHd,MAAAA,EAAOyC,qCAAPzC,EAAOyC,oCAAsCf,EAAKZ,SAClD,MACF,IAAK,kBACHd,EAAO0C,mBAAP1C,EAAO0C,kBAAoBhB,EAAKZ,SAChC,MACF,IAAK,QACW,MAAdd,EAAO2C,SAAP3C,EAAO2C,QAAUjB,EAAKZ,SAK5B,EACA,CAACd,IAGH,MAAO,CACLE,WAAAA,EACA0C,aAAc,CACZC,OAAQ,CAAErC,KAAAA,GACVgB,UAAAA,EACAsB,UAAW,WACTzB,QAAQC,IAAI,0CACZF,GACF,EACA2B,mBAAmB,EACnBC,iCAAiC,EACjCC,2BAA2B,EAC3BC,kBAAkB,EAClBC,mBAAmB,EACnBC,gCAAiC,SAEnC9C,YAAAA,EACA+C,aAAc,WAAF,OAAQ1C,EAAK,gBAAgB,EACzC2C,WAAY,kBAAM3C,EAAK,cAAc,EACrC4C,YAAa,SAACC,GAAY,OAAK7C,EAAK,eAAgB,CAAE6C,KAAAA,GAAO,EAC7DC,iBAAkB,kBAAM9C,EAAK,aAAa,EAC1C+C,WAAY,WAAM,OAAA/C,EAAK,cAAc,EACrCgD,YAAa,WAAF,OAAQhD,EAAK,eAAe,EAE3C,CAOSiD,CAAU7D,EAASC,EAAQC,EACpC,yBASgB,SAAqB4D,GACnC,IACAC,EAAKD,EAALC,MACAC,EAAYF,EAAZE,aACArD,EAAYmD,EAAZnD,aAGMsD,EAAQlE,EANP+D,EAAP9D,mJAISkE,CAAAJ,EAAAK,GAEsC,CAAExD,aAAAA,iBAEjD,OACEyD,wBAACC,UAAO7C,GACN8C,IAAKL,EAAM9D,YACP8D,EAAMpB,aACNmB,GACJD,MAAOA,IAGb"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@unith-ai/react-native",
- "version": "0.0.1",
+ "version": "0.0.3",
  "description": "React Native WebView wrapper for Unith AI digital humans",
  "main": "./dist/lib.js",
  "module": "./dist/lib.module.js",