@unith-ai/react-native 0.0.10 → 0.0.12
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts +6 -5
- package/dist/index.d.ts.map +1 -1
- package/dist/lib.js +1 -1
- package/dist/lib.js.map +1 -1
- package/dist/lib.modern.mjs +1 -1
- package/dist/lib.modern.mjs.map +1 -1
- package/dist/lib.module.js +1 -1
- package/dist/lib.module.js.map +1 -1
- package/dist/lib.umd.js +1 -1
- package/dist/lib.umd.js.map +1 -1
- package/package.json +1 -1
package/dist/index.d.ts
CHANGED
@@ -1,15 +1,13 @@
+import { Status as StatusType, Mode as ModeType, MicrophoneStatus as MicrophoneStatusType } from "@unith-ai/core-client";
 import React from "react";
 import { ViewStyle } from "react-native";
 export type ConversationOptions = {
     orgId: string;
     headId: string;
     apiKey: string;
-    mode?: string;
     language?: string;
     username?: string;
-    allowWakeLock?: boolean;
-    fadeTransitionsType?: string;
-    microphoneProvider?: "azure" | "eleven_labs";
+    microphoneProvider?: "azure" | "eleven_labs" | "custom";
 };
 export type ConversationEvents = {
     onStatusChange?: (prop: {
@@ -65,12 +63,15 @@ export type UseConversationResult = {
     toggleMute: () => void;
     keepSession: () => void;
 };
-export declare function useConversation(options: ConversationOptions, events?: ConversationEvents, bridge?: BridgeOptions): UseConversationResult;
+export declare function useConversation(options: ConversationOptions, events?: ConversationEvents): UseConversationResult;
 export type UnithConversationViewProps = ConversationEvents & {
     options: ConversationOptions;
     style?: ViewStyle;
     webviewProps?: Record<string, any>;
     webClientUrl?: string;
 };
+export type Status = StatusType;
+export type Mode = ModeType;
+export type MicrophoneStatus = MicrophoneStatusType;
 export declare function UnithConversationView({ options, style, webviewProps, webClientUrl, ...events }: UnithConversationViewProps): React.JSX.Element;
 //# sourceMappingURL=index.d.ts.map
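Read as an API surface, the declaration diff above amounts to: useConversation drops its third bridge argument, ConversationOptions loses mode, allowWakeLock, and fadeTransitionsType while microphoneProvider gains "custom", and the Status, Mode, and MicrophoneStatus types from @unith-ai/core-client are re-exported. A minimal consumer-side sketch against the 0.0.12 typings follows; the credentials and the DemoScreen component are illustrative placeholders, not part of the package.

import React from "react";
import {
  UnithConversationView,
  useConversation,
  type ConversationOptions,
  type UseConversationResult,
} from "@unith-ai/react-native";

// Placeholder values; real orgId/headId/apiKey come from your UNITH account.
const options: ConversationOptions = {
  orgId: "my-org",
  headId: "my-head",
  apiKey: "my-api-key",
  microphoneProvider: "custom", // "custom" is newly accepted in 0.0.12
};

export function DemoScreen(): React.JSX.Element {
  // 0.0.12 signature is (options, events?); the 0.0.10 third parameter
  // (bridge?: BridgeOptions) no longer exists.
  const convo: UseConversationResult = useConversation(options, {
    onConnect: ({ microphoneAccess }) =>
      console.log("microphone access:", microphoneAccess),
    onMicrophoneStatusChange: ({ status }) => console.log("mic:", status),
  });

  // convo.startSession(), convo.sendMessage(text), etc. drive the bridge;
  // the drop-in view below wires an equivalent bridge up itself, so a real
  // app would use either the hook or the component, not both.
  return (
    <UnithConversationView
      options={options}
      onError={(e) => console.warn(e.message)}
    />
  );
}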
package/dist/index.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.tsx"],"names":[],"mappings":"AAAA,OAAO,
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.tsx"],"names":[],"mappings":"AAAA,OAAO,EAAE,MAAM,IAAI,UAAU,EAAE,IAAI,IAAI,QAAQ,EAAE,gBAAgB,IAAI,oBAAoB,EAAE,MAAM,uBAAuB,CAAC;AACzH,OAAO,KAAwC,MAAM,OAAO,CAAC;AAC7D,OAAO,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAGzC,MAAM,MAAM,mBAAmB,GAAG;IAChC,KAAK,EAAE,MAAM,CAAC;IACd,MAAM,EAAE,MAAM,CAAC;IACf,MAAM,EAAE,MAAM,CAAC;IACf,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,kBAAkB,CAAC,EAAE,OAAO,GAAG,aAAa,GAAG,QAAQ,CAAC;CACzD,CAAC;AAEF,MAAM,MAAM,kBAAkB,GAAG;IAC/B,cAAc,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,MAAM,EAAE,MAAM,CAAA;KAAE,KAAK,IAAI,CAAC;IACpD,SAAS,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,MAAM,EAAE,MAAM,CAAC;QAAC,QAAQ,EAAE,GAAG,CAAC;QAAC,gBAAgB,EAAE,OAAO,CAAA;KAAE,KAAK,IAAI,CAAC;IACzF,YAAY,CAAC,EAAE,CAAC,IAAI,EAAE,GAAG,KAAK,IAAI,CAAC;IACnC,SAAS,CAAC,EAAE,CAAC,IAAI,EAAE,GAAG,KAAK,IAAI,CAAC;IAChC,aAAa,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,WAAW,EAAE,MAAM,EAAE,CAAA;KAAE,KAAK,IAAI,CAAC;IAC1D,gBAAgB,CAAC,EAAE,MAAM,IAAI,CAAC;IAC9B,SAAS,CAAC,EAAE,MAAM,IAAI,CAAC;IACvB,kBAAkB,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,OAAO,EAAE,OAAO,CAAA;KAAE,KAAK,IAAI,CAAC;IAC1D,eAAe,CAAC,EAAE,MAAM,IAAI,CAAC;IAC7B,aAAa,CAAC,EAAE,MAAM,IAAI,CAAC;IAC3B,aAAa,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,OAAO,EAAE,OAAO,CAAA;KAAE,KAAK,IAAI,CAAC;IACrD,OAAO,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,OAAO,EAAE,MAAM,CAAC;QAAC,eAAe,EAAE,OAAO,CAAC;QAAC,IAAI,EAAE,MAAM,CAAA;KAAE,KAAK,IAAI,CAAC;IACtF,iBAAiB,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,OAAO,EAAE,MAAM,CAAA;KAAE,KAAK,IAAI,CAAC;IACxD,wBAAwB,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,MAAM,EAAE,IAAI,GAAG,KAAK,GAAG,YAAY,CAAA;KAAE,KAAK,IAAI,CAAC;IACnF,mCAAmC,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,UAAU,EAAE,MAAM,CAAA;KAAE,KAAK,IAAI,CAAC;CAC9E,CAAC;AAEF,MAAM,MAAM,aAAa,GAAG;IAC1B,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,cAAc,CAAC,EAAE,MAAM,CAAC;CACzB,CAAC;AAEF,MAAM,MAAM,qBAAqB,GAAG;IAClC,UAAU,EAAE,KAAK,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC;IACjC,YAAY,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;IAClC,WAAW,EAAE,OAAO,CAAC;IACrB,YAAY,EAAE,MAAM,IAAI,CAAC;IACzB,UAAU,EAAE,MAAM,IAAI,CAAC;IACvB,WAAW,EAAE,CAAC,IAAI,EAAE,MAAM,KAAK,IAAI,CAAC;IACpC,gBAAgB,EAAE,MAAM,IAAI,CAAC;IAC7B,UAAU,EAAE,MAAM,IAAI,CAAC;IACvB,WAAW,EAAE,MAAM,IAAI,CAAC;CACzB,CAAC;AA4QF,wBAAgB,eAAe,CAC7B,OAAO,EAAE,mBAAmB,EAC5B,MAAM,CAAC,EAAE,kBAAkB,GAC1B,qBAAqB,CAEvB;AAED,MAAM,MAAM,0BAA0B,GAAG,kBAAkB,GAAG;IAC5D,OAAO,EAAE,mBAAmB,CAAC;IAC7B,KAAK,CAAC,EAAE,SAAS,CAAC;IAClB,YAAY,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;IACnC,YAAY,CAAC,EAAE,MAAM,CAAC;CACvB,CAAC;AAGF,MAAM,MAAM,MAAM,GAAG,UAAU,CAAC;AAChC,MAAM,MAAM,IAAI,GAAG,QAAQ,CAAC;AAC5B,MAAM,MAAM,gBAAgB,GAAG,oBAAoB,CAAC;AAGpD,wBAAgB,qBAAqB,CAAC,EACpC,OAAO,EACP,KAAK,EACL,YAAY,EACZ,YAAY,EACZ,GAAG,MAAM,EACV,EAAE,0BAA0B,qBAW5B"}
package/dist/lib.js
CHANGED
@@ -1,2 +1,2 @@
-
var n=require("react"),e=require("react-native-webview");function a(n){return n&&"object"==typeof n&&"default"in n?n:{default:n}}var t=/*#__PURE__*/a(n);function o(){return o=Object.assign?Object.assign.bind():function(n){for(var e=1;e<arguments.length;e++){var a=arguments[e];for(var t in a)({}).hasOwnProperty.call(a,t)&&(n[t]=a[t])}return n},o.apply(null,arguments)}var
+
var n=require("react"),e=require("react-native-webview");function a(n){return n&&"object"==typeof n&&"default"in n?n:{default:n}}var t=/*#__PURE__*/a(n);function o(){return o=Object.assign?Object.assign.bind():function(n){for(var e=1;e<arguments.length;e++){var a=arguments[e];for(var t in a)({}).hasOwnProperty.call(a,t)&&(n[t]=a[t])}return n},o.apply(null,arguments)}var s=["options","style","webviewProps","webClientUrl"];function r(e,a){return function(e,a){void 0===a&&(a={});var t=n.useRef(null),s=n.useRef(!1),r=n.useState(!1),i=r[0],c=r[1],d=n.useCallback(function(n,e){var a;null==(a=t.current)||a.postMessage(JSON.stringify({type:n,payload:e}))},[]),l=n.useCallback(function(){s.current||(s.current=!0,c(!0),setTimeout(function(){d("INIT",o({},e))},100))},[e,d]),u=n.useCallback(function(n){var e=null;try{e=JSON.parse(n.nativeEvent.data)}catch(n){return}if(e)switch(e.type){case"STATUS_CHANGE":null==a.onStatusChange||a.onStatusChange(e.payload);break;case"CONNECT":null==a.onConnect||a.onConnect(e.payload);break;case"DISCONNECT":null==a.onDisconnect||a.onDisconnect(e.payload);break;case"MESSAGE":null==a.onMessage||a.onMessage(e.payload);break;case"SUGGESTIONS":null==a.onSuggestions||a.onSuggestions(e.payload);break;case"SPEAKING_START":null==a.onSpeakingStart||a.onSpeakingStart();break;case"SPEAKING_END":null==a.onSpeakingEnd||a.onSpeakingEnd();break;case"TIMEOUT_WARNING":null==a.onTimeoutWarning||a.onTimeoutWarning();break;case"TIMEOUT":null==a.onTimeout||a.onTimeout();break;case"KEEP_SESSION":null==a.onKeepSession||a.onKeepSession(e.payload);break;case"MUTE_STATUS":null==a.onMuteStatusChange||a.onMuteStatusChange(e.payload);break;case"MIC_STATUS":null==a.onMicrophoneStatusChange||a.onMicrophoneStatusChange(e.payload);break;case"MIC_TRANSCRIPT":null==a.onMicrophoneSpeechRecognitionResult||a.onMicrophoneSpeechRecognitionResult(e.payload);break;case"MIC_ERROR":null==a.onMicrophoneError||a.onMicrophoneError(e.payload);break;case"ERROR":null==a.onError||a.onError(e.payload)}},[a]);return{webViewRef:t,webViewProps:{source:{html:'<!doctype html>\n<html>\n <head>\n <meta charset="utf-8" />\n <meta name="viewport" content="width=device-width, initial-scale=1" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id="root"></div>\n <script type="module">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => { \n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === "INIT") {\n try {\n const { Conversation } = await import("https://unpkg.com/@unith-ai/core-client@2.0.3-beta.2/dist/lib.web.js");\n\n if (!("VideoDecoder" in window)) {\n send("ERROR", {\n message: "WebCodecs VideoDecoder is not supported on this device.",\n type: "modal",\n endConversation: true,\n });\n return;\n }\n\n const element = document.getElementById("root");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send("STATUS_CHANGE", data),\n onConnect: (data) => send("CONNECT", data),\n onDisconnect: (data) => send("DISCONNECT", data),\n onMessage: (data) => send("MESSAGE", data),\n onSuggestions: (data) => send("SUGGESTIONS", data),\n onSpeakingStart: () => send("SPEAKING_START"),\n 
onSpeakingEnd: () => send("SPEAKING_END"),\n onTimeoutWarning: () => send("TIMEOUT_WARNING"),\n onTimeout: () => send("TIMEOUT"),\n onKeepSession: (data) => send("KEEP_SESSION", data),\n onMuteStatusChange: (data) => send("MUTE_STATUS", data),\n onError: (data) => send("ERROR", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send("MIC_ERROR", data),\n onMicrophoneStatusChange: (data) => send("MIC_STATUS", data),\n onMicrophoneSpeechRecognitionResult: (data) => send("MIC_TRANSCRIPT", data),\n },\n });\n\n ready = true;\n send("READY");\n\n while (queue.length > 0) {\n const next = queue.shift();\n try {\n await applyMessage(next);\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Failed to process queued message",\n type: "notification",\n endConversation: false,\n });\n }\n }\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Failed to initialize conversation",\n type: "modal",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n try {\n switch (type) {\n case "START_SESSION":\n await conversation.startSession();\n break;\n case "END_SESSION":\n await conversation.endSession();\n break;\n case "SEND_MESSAGE":\n await conversation.sendMessage(payload?.text || "");\n break;\n case "TOGGLE_MIC":\n await conversation.toggleMicrophone();\n break;\n case "TOGGLE_MUTE":\n await conversation.toggleMute();\n break;\n case "KEEP_SESSION":\n await conversation.keepSession();\n break;\n default:\n break;\n }\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Operation failed",\n type: "notification",\n endConversation: false,\n });\n }\n };\n\n const handleIncoming = (event) => { \n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== "INIT") {\n queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (error) {\n console.error("Failed to handle incoming message:", error);\n }\n };\n\n window.addEventListener("message", handleIncoming);\n document.addEventListener("message", handleIncoming);\n <\/script>\n </body>\n</html>',baseUrl:"https://unpkg.com"},onMessage:u,onLoadEnd:l,javaScriptEnabled:!0,mediaPlaybackRequiresUserAction:!1,allowsInlineMediaPlayback:!0,allowsFileAccess:!0,domStorageEnabled:!0,mediaCapturePermissionGrantType:"grant"},initialized:i,startSession:function(){return d("START_SESSION")},endSession:function(){return d("END_SESSION")},sendMessage:function(n){return d("SEND_MESSAGE",{text:n})},toggleMicrophone:function(){return d("TOGGLE_MIC")},toggleMute:function(){return d("TOGGLE_MUTE")},keepSession:function(){return d("KEEP_SESSION")}}}(e,a)}exports.UnithConversationView=function(n){var a=n.style,i=n.webviewProps,c=r(n.options,function(n,e){if(null==n)return{};var a={};for(var t in n)if({}.hasOwnProperty.call(n,t)){if(-1!==e.indexOf(t))continue;a[t]=n[t]}return a}(n,s));/*#__PURE__*/return t.default.createElement(e.WebView,o({ref:c.webViewRef},c.webViewProps,i,{style:a}))},exports.useConversation=r;
//# sourceMappingURL=lib.js.map
package/dist/lib.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"lib.js","sources":["../src/index.tsx"],"sourcesContent":["import React, { useCallback, useMemo, useRef, useState } from \"react\";\nimport { ViewStyle } from \"react-native\";\nimport { WebView, WebViewMessageEvent } from \"react-native-webview\";\n\nexport type ConversationOptions = {\n orgId: string;\n headId: string;\n apiKey: string;\n mode?: string;\n language?: string;\n username?: string;\n allowWakeLock?: boolean;\n fadeTransitionsType?: string;\n microphoneProvider?: \"azure\" | \"eleven_labs\"\n};\n\nexport type ConversationEvents = {\n onStatusChange?: (prop: { status: string }) => void;\n onConnect?: (prop: { userId: string; headInfo: any; microphoneAccess: boolean }) => void;\n onDisconnect?: (prop: any) => void;\n onMessage?: (prop: any) => void;\n onSuggestions?: (prop: { suggestions: string[] }) => void;\n onTimeoutWarning?: () => void;\n onTimeout?: () => void;\n onMuteStatusChange?: (prop: { isMuted: boolean }) => void;\n onSpeakingStart?: () => void;\n onSpeakingEnd?: () => void;\n onKeepSession?: (prop: { granted: boolean }) => void;\n onError?: (prop: { message: string; endConversation: boolean; type: string }) => void;\n onMicrophoneError?: (prop: { message: string }) => void;\n onMicrophoneStatusChange?: (prop: { status: \"ON\" | \"OFF\" | \"PROCESSING\" }) => void;\n onMicrophoneSpeechRecognitionResult?: (prop: { transcript: string }) => void;\n};\n\nexport type BridgeOptions = {\n webClientUrl?: string;\n webViewBaseUrl?: string;\n};\n\nexport type UseConversationResult = {\n webViewRef: React.RefObject<any>;\n webViewProps: Record<string, any>;\n initialized: boolean;\n startSession: () => void;\n endSession: () => void;\n sendMessage: (text: string) => void;\n toggleMicrophone: () => void;\n toggleMute: () => void;\n keepSession: () => void;\n};\n\ntype BridgeMessage = {\n type: string;\n payload?: any;\n};\n\nconst DEFAULT_WEB_CLIENT_URL =\n \"https://unpkg.com/@unith-ai/core-client@2.0.3-beta.2/dist/lib.web.js\";\n\nfunction buildHtml(webClientUrl: string) {\n return `<!doctype html>\n<html>\n <head>\n <meta charset=\"utf-8\" />\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id=\"root\"></div>\n <script type=\"module\">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n \n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => { \n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === \"INIT\") {\n try {\n const { Conversation } = await import(\"${webClientUrl}\");\n\n if (!(\"VideoDecoder\" in window)) {\n send(\"ERROR\", {\n message: \"WebCodecs VideoDecoder is not supported on this device.\",\n type: \"modal\",\n endConversation: true,\n });\n return;\n }\n\n const element = document.getElementById(\"root\");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send(\"STATUS_CHANGE\", data),\n onConnect: (data) => send(\"CONNECT\", data),\n onDisconnect: (data) => send(\"DISCONNECT\", data),\n onMessage: (data) => send(\"MESSAGE\", data),\n onSuggestions: (data) => send(\"SUGGESTIONS\", data),\n onSpeakingStart: () => send(\"SPEAKING_START\"),\n onSpeakingEnd: () => 
send(\"SPEAKING_END\"),\n onTimeoutWarning: () => send(\"TIMEOUT_WARNING\"),\n onTimeout: () => send(\"TIMEOUT\"),\n onKeepSession: (data) => send(\"KEEP_SESSION\", data),\n onMuteStatusChange: (data) => send(\"MUTE_STATUS\", data),\n onError: (data) => send(\"ERROR\", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send(\"MIC_ERROR\", data),\n onMicrophoneStatusChange: (data) => send(\"MIC_STATUS\", data),\n onMicrophoneSpeechRecognitionResult: (data) => send(\"MIC_TRANSCRIPT\", data),\n },\n });\n\n ready = true;\n send(\"READY\");\n\n while (queue.length > 0) {\n const next = queue.shift();\n try {\n await applyMessage(next);\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to process queued message\",\n type: \"notification\",\n endConversation: false,\n });\n }\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to initialize conversation\",\n type: \"modal\",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n try {\n switch (type) {\n case \"START_SESSION\":\n await conversation.startSession();\n break;\n case \"END_SESSION\":\n await conversation.endSession();\n break;\n case \"SEND_MESSAGE\":\n await conversation.sendMessage(payload?.text || \"\");\n break;\n case \"TOGGLE_MIC\":\n await conversation.toggleMicrophone();\n break;\n case \"TOGGLE_MUTE\":\n await conversation.toggleMute();\n break;\n case \"KEEP_SESSION\":\n await conversation.keepSession();\n break;\n default:\n break;\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Operation failed\",\n type: \"notification\",\n endConversation: false,\n });\n }\n };\n\n const handleIncoming = (event) => { \n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== \"INIT\") {\n queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (error) {\n console.error(\"Failed to handle incoming message:\", error);\n }\n };\n\n window.addEventListener(\"message\", handleIncoming);\n document.addEventListener(\"message\", handleIncoming);\n </script>\n </body>\n</html>`;\n}\n\nfunction useBridge(\n options: ConversationOptions,\n events: ConversationEvents = {},\n bridge: BridgeOptions = {}\n): UseConversationResult {\n const webViewRef = useRef<any>(null);\n const initializedRef = useRef(false);\n const [initialized, setInitialized] = useState(false);\n\n const html = useMemo(\n () => buildHtml(bridge.webClientUrl || DEFAULT_WEB_CLIENT_URL),\n [bridge.webClientUrl]\n );\n\n const post = useCallback((type: string, payload?: any) => {\n const message: BridgeMessage = { type, payload };\n webViewRef.current?.postMessage(JSON.stringify(message));\n }, []);\n\n const handleLoadEnd = useCallback(() => {\n if (initializedRef.current) {\n return;\n }\n\n initializedRef.current = true;\n setInitialized(true);\n\n // Add a small delay to ensure WebView is ready to receive messages\n setTimeout(() => {\n post(\"INIT\", { ...options });\n }, 100);\n }, [options, post]);\n\n const onMessage = useCallback(\n (event: WebViewMessageEvent) => {\n let data: BridgeMessage | null = null;\n try {\n data = JSON.parse(event.nativeEvent.data);\n } catch (_) {\n return;\n }\n if (!data) return;\n\n switch (data.type) {\n case \"STATUS_CHANGE\":\n events.onStatusChange?.(data.payload);\n break;\n case \"CONNECT\":\n events.onConnect?.(data.payload);\n break;\n case \"DISCONNECT\":\n events.onDisconnect?.(data.payload);\n break;\n case \"MESSAGE\":\n events.onMessage?.(data.payload);\n break;\n case 
\"SUGGESTIONS\":\n events.onSuggestions?.(data.payload);\n break;\n case \"SPEAKING_START\":\n events.onSpeakingStart?.();\n break;\n case \"SPEAKING_END\":\n events.onSpeakingEnd?.();\n break;\n case \"TIMEOUT_WARNING\":\n events.onTimeoutWarning?.();\n break;\n case \"TIMEOUT\":\n events.onTimeout?.();\n break;\n case \"KEEP_SESSION\":\n events.onKeepSession?.(data.payload);\n break;\n case \"MUTE_STATUS\":\n events.onMuteStatusChange?.(data.payload);\n break;\n case \"MIC_STATUS\":\n events.onMicrophoneStatusChange?.(data.payload);\n break;\n case \"MIC_TRANSCRIPT\":\n events.onMicrophoneSpeechRecognitionResult?.(data.payload);\n break;\n case \"MIC_ERROR\":\n events.onMicrophoneError?.(data.payload);\n break;\n // case \"WEB_LOG\":\n // if (typeof console !== \"undefined\") {\n // const level = data.payload?.level || \"log\";\n // const args = Array.isArray(data.payload?.args)\n // ? data.payload.args\n // : [data.payload?.args];\n // const fn = (console as any)[level] || console.log;\n // fn(...args);\n // }\n // break;\n case \"ERROR\":\n events.onError?.(data.payload);\n break;\n default:\n break;\n }\n },\n [events]\n );\n\n return {\n webViewRef,\n webViewProps: {\n source: { html, baseUrl: bridge.webViewBaseUrl || \"https://unpkg.com\" },\n onMessage,\n onLoadEnd: handleLoadEnd,\n javaScriptEnabled: true,\n mediaPlaybackRequiresUserAction: false,\n allowsInlineMediaPlayback: true,\n // allowsFileAccess: true,\n // domStorageEnabled: true,\n mediaCapturePermissionGrantType: 'grant',\n },\n initialized,\n startSession: () => post(\"START_SESSION\"),\n endSession: () => post(\"END_SESSION\"),\n sendMessage: (text: string) => post(\"SEND_MESSAGE\", { text }),\n toggleMicrophone: () => post(\"TOGGLE_MIC\"),\n toggleMute: () => post(\"TOGGLE_MUTE\"),\n keepSession: () => post(\"KEEP_SESSION\"),\n };\n}\n\nexport function useConversation(\n options: ConversationOptions,\n events?: ConversationEvents,\n bridge?: BridgeOptions\n): UseConversationResult {\n return useBridge(options, events, bridge);\n}\n\n\nexport type UnithConversationViewProps = ConversationEvents & {\n options: ConversationOptions;\n style?: ViewStyle;\n webviewProps?: Record<string, any>;\n webClientUrl?: string;\n};\n\nexport function UnithConversationView({\n options,\n style,\n webviewProps,\n webClientUrl,\n ...events\n}: UnithConversationViewProps) {\n const convo = useConversation(options, events, { webClientUrl });\n\n return (\n <WebView\n ref={convo.webViewRef}\n {...convo.webViewProps}\n {...webviewProps}\n style={style}\n />\n 
);\n}\n"],"names":["useConversation","options","events","bridge","webViewRef","useRef","initializedRef","_useState","useState","initialized","setInitialized","html","useMemo","webClientUrl","post","useCallback","type","payload","_webViewRef$current","current","postMessage","JSON","stringify","handleLoadEnd","setTimeout","_extends","onMessage","event","data","parse","nativeEvent","_","onStatusChange","onConnect","onDisconnect","onSuggestions","onSpeakingStart","onSpeakingEnd","onTimeoutWarning","onTimeout","onKeepSession","onMuteStatusChange","onMicrophoneStatusChange","onMicrophoneSpeechRecognitionResult","onMicrophoneError","onError","webViewProps","source","baseUrl","webViewBaseUrl","onLoadEnd","javaScriptEnabled","mediaPlaybackRequiresUserAction","allowsInlineMediaPlayback","mediaCapturePermissionGrantType","startSession","endSession","sendMessage","text","toggleMicrophone","toggleMute","keepSession","useBridge","_ref","style","webviewProps","convo","_objectWithoutPropertiesLoose","_excluded","React","WebView","ref"],"mappings":"kbA2UgBA,EACdC,EACAC,EACAC,GAEA,OAtIF,SACEF,EACAC,EACAC,QADAD,IAAAA,IAAAA,EAA6B,CAAE,YAC/BC,IAAAA,EAAwB,IAExB,IAAMC,EAAaC,EAAAA,OAAY,MACzBC,EAAiBD,UAAO,GAC9BE,EAAsCC,EAAAA,UAAS,GAAxCC,EAAWF,EAAEG,GAAAA,EAAcH,EAAA,GAE5BI,EAAOC,EAAAA,QACX,y6BAAgBT,EAAOU,cA3JzB,wEAgC+D,ywHA2HC,EAC9D,CAACV,EAAOU,eAGJC,EAAOC,EAAAA,YAAY,SAACC,EAAcC,GAAiB,IAAAC,EAEvDA,OAAAA,EAAAd,EAAWe,UAAXD,EAAoBE,YAAYC,KAAKC,UADN,CAAEN,KAAAA,EAAMC,QAAAA,IAEzC,EAAG,IAEGM,EAAgBR,EAAWA,YAAC,WAC5BT,EAAea,UAInBb,EAAea,SAAU,EACzBT,GAAe,GAGfc,WAAW,WACTV,EAAK,OAAMW,EAAA,GAAOxB,GACpB,EAAG,KACL,EAAG,CAACA,EAASa,IAEPY,EAAYX,EAAAA,YAChB,SAACY,GACC,IAAIC,EAA6B,KACjC,IACEA,EAAOP,KAAKQ,MAAMF,EAAMG,YAAYF,KACtC,CAAE,MAAOG,GACP,MACF,CACA,GAAKH,EAEL,OAAQA,EAAKZ,MACX,IAAK,sBACHd,EAAO8B,gBAAP9B,EAAO8B,eAAiBJ,EAAKX,SAC7B,MACF,IAAK,UACa,MAAhBf,EAAO+B,WAAP/B,EAAO+B,UAAYL,EAAKX,SACxB,MACF,IAAK,aACHf,MAAAA,EAAOgC,cAAPhC,EAAOgC,aAAeN,EAAKX,SAC3B,MACF,IAAK,gBACHf,EAAOwB,WAAPxB,EAAOwB,UAAYE,EAAKX,SACxB,MACF,IAAK,cACiB,MAApBf,EAAOiC,eAAPjC,EAAOiC,cAAgBP,EAAKX,SAC5B,MACF,IAAK,iBACHf,MAAAA,EAAOkC,iBAAPlC,EAAOkC,kBACP,MACF,IAAK,qBACHlC,EAAOmC,eAAPnC,EAAOmC,gBACP,MACF,IAAK,kBACHnC,MAAAA,EAAOoC,kBAAPpC,EAAOoC,mBACP,MACF,IAAK,gBACHpC,EAAOqC,WAAPrC,EAAOqC,YACP,MACF,IAAK,eACiB,MAApBrC,EAAOsC,eAAPtC,EAAOsC,cAAgBZ,EAAKX,SAC5B,MACF,IAAK,cACHf,MAAAA,EAAOuC,oBAAPvC,EAAOuC,mBAAqBb,EAAKX,SACjC,MACF,IAAK,mBACHf,EAAOwC,0BAAPxC,EAAOwC,yBAA2Bd,EAAKX,SACvC,MACF,IAAK,iBACuC,MAA1Cf,EAAOyC,qCAAPzC,EAAOyC,oCAAsCf,EAAKX,SAClD,MACF,IAAK,YACHf,MAAAA,EAAO0C,mBAAP1C,EAAO0C,kBAAoBhB,EAAKX,SAChC,MAWF,IAAK,cACHf,EAAO2C,SAAP3C,EAAO2C,QAAUjB,EAAKX,SAK5B,EACA,CAACf,IAGH,MAAO,CACLE,WAAAA,EACA0C,aAAc,CACZC,OAAQ,CAAEpC,KAAAA,EAAMqC,QAAS7C,EAAO8C,gBAAkB,qBAClDvB,UAAAA,EACAwB,UAAW3B,EACX4B,mBAAmB,EACnBC,iCAAiC,EACjCC,2BAA2B,EAG3BC,gCAAiC,SAEnC7C,YAAAA,EACA8C,aAAc,WAAF,OAAQzC,EAAK,gBAAgB,EACzC0C,WAAY,kBAAM1C,EAAK,cAAc,EACrC2C,YAAa,SAACC,GAAY,OAAK5C,EAAK,eAAgB,CAAE4C,KAAAA,GAAO,EAC7DC,iBAAkB,kBAAM7C,EAAK,aAAa,EAC1C8C,WAAY,WAAM,OAAA9C,EAAK,cAAc,EACrC+C,YAAa,WAAF,OAAQ/C,EAAK,eAAe,EAE3C,CAOSgD,CAAU7D,EAASC,EAAQC,EACpC,+BAUgB,SAAqB4D,GACnC,IACAC,EAAKD,EAALC,MACAC,EAAYF,EAAZE,aACApD,EAAYkD,EAAZlD,aAGMqD,EAAQlE,EANP+D,EAAP9D,mJAISkE,CAAAJ,EAAAK,GAEsC,CAAEvD,aAAAA,iBAEjD,OACEwD,wBAACC,UAAO7C,GACN8C,IAAKL,EAAM9D,YACP8D,EAAMpB,aACNmB,GACJD,MAAOA,IAGb"}
+
{"version":3,"file":"lib.js","sources":["../src/index.tsx"],"sourcesContent":["import { Status as StatusType, Mode as ModeType, MicrophoneStatus as MicrophoneStatusType } from \"@unith-ai/core-client\";\nimport React, { useCallback, useRef, useState } from \"react\";\nimport { ViewStyle } from \"react-native\";\nimport { WebView, WebViewMessageEvent } from \"react-native-webview\";\n\nexport type ConversationOptions = {\n orgId: string;\n headId: string;\n apiKey: string;\n language?: string;\n username?: string;\n microphoneProvider?: \"azure\" | \"eleven_labs\" | \"custom\";\n};\n\nexport type ConversationEvents = {\n onStatusChange?: (prop: { status: string }) => void;\n onConnect?: (prop: { userId: string; headInfo: any; microphoneAccess: boolean }) => void;\n onDisconnect?: (prop: any) => void;\n onMessage?: (prop: any) => void;\n onSuggestions?: (prop: { suggestions: string[] }) => void;\n onTimeoutWarning?: () => void;\n onTimeout?: () => void;\n onMuteStatusChange?: (prop: { isMuted: boolean }) => void;\n onSpeakingStart?: () => void;\n onSpeakingEnd?: () => void;\n onKeepSession?: (prop: { granted: boolean }) => void;\n onError?: (prop: { message: string; endConversation: boolean; type: string }) => void;\n onMicrophoneError?: (prop: { message: string }) => void;\n onMicrophoneStatusChange?: (prop: { status: \"ON\" | \"OFF\" | \"PROCESSING\" }) => void;\n onMicrophoneSpeechRecognitionResult?: (prop: { transcript: string }) => void;\n};\n\nexport type BridgeOptions = {\n webClientUrl?: string;\n webViewBaseUrl?: string;\n};\n\nexport type UseConversationResult = {\n webViewRef: React.RefObject<any>;\n webViewProps: Record<string, any>;\n initialized: boolean;\n startSession: () => void;\n endSession: () => void;\n sendMessage: (text: string) => void;\n toggleMicrophone: () => void;\n toggleMute: () => void;\n keepSession: () => void;\n};\n\ntype BridgeMessage = {\n type: string;\n payload?: any;\n};\n\nconst DEFAULT_WEB_CLIENT_URL =\n \"https://unpkg.com/@unith-ai/core-client@2.0.3-beta.2/dist/lib.web.js\";\n\nfunction buildHtml(webClientUrl: string) {\n return `<!doctype html>\n<html>\n <head>\n <meta charset=\"utf-8\" />\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id=\"root\"></div>\n <script type=\"module\">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => { \n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === \"INIT\") {\n try {\n const { Conversation } = await import(\"${webClientUrl}\");\n\n if (!(\"VideoDecoder\" in window)) {\n send(\"ERROR\", {\n message: \"WebCodecs VideoDecoder is not supported on this device.\",\n type: \"modal\",\n endConversation: true,\n });\n return;\n }\n\n const element = document.getElementById(\"root\");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send(\"STATUS_CHANGE\", data),\n onConnect: (data) => send(\"CONNECT\", data),\n onDisconnect: (data) => send(\"DISCONNECT\", data),\n onMessage: (data) => send(\"MESSAGE\", data),\n onSuggestions: (data) => send(\"SUGGESTIONS\", data),\n onSpeakingStart: () => 
send(\"SPEAKING_START\"),\n onSpeakingEnd: () => send(\"SPEAKING_END\"),\n onTimeoutWarning: () => send(\"TIMEOUT_WARNING\"),\n onTimeout: () => send(\"TIMEOUT\"),\n onKeepSession: (data) => send(\"KEEP_SESSION\", data),\n onMuteStatusChange: (data) => send(\"MUTE_STATUS\", data),\n onError: (data) => send(\"ERROR\", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send(\"MIC_ERROR\", data),\n onMicrophoneStatusChange: (data) => send(\"MIC_STATUS\", data),\n onMicrophoneSpeechRecognitionResult: (data) => send(\"MIC_TRANSCRIPT\", data),\n },\n });\n\n ready = true;\n send(\"READY\");\n\n while (queue.length > 0) {\n const next = queue.shift();\n try {\n await applyMessage(next);\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to process queued message\",\n type: \"notification\",\n endConversation: false,\n });\n }\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to initialize conversation\",\n type: \"modal\",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n try {\n switch (type) {\n case \"START_SESSION\":\n await conversation.startSession();\n break;\n case \"END_SESSION\":\n await conversation.endSession();\n break;\n case \"SEND_MESSAGE\":\n await conversation.sendMessage(payload?.text || \"\");\n break;\n case \"TOGGLE_MIC\":\n await conversation.toggleMicrophone();\n break;\n case \"TOGGLE_MUTE\":\n await conversation.toggleMute();\n break;\n case \"KEEP_SESSION\":\n await conversation.keepSession();\n break;\n default:\n break;\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Operation failed\",\n type: \"notification\",\n endConversation: false,\n });\n }\n };\n\n const handleIncoming = (event) => { \n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== \"INIT\") {\n queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (error) {\n console.error(\"Failed to handle incoming message:\", error);\n }\n };\n\n window.addEventListener(\"message\", handleIncoming);\n document.addEventListener(\"message\", handleIncoming);\n </script>\n </body>\n</html>`;\n}\n\nfunction useBridge(\n options: ConversationOptions,\n events: ConversationEvents = {},\n): UseConversationResult {\n const webViewRef = useRef<any>(null);\n const initializedRef = useRef(false);\n const [initialized, setInitialized] = useState(false);\n\n const html = buildHtml(DEFAULT_WEB_CLIENT_URL)\n\n const post = useCallback((type: string, payload?: any) => {\n const message: BridgeMessage = { type, payload };\n webViewRef.current?.postMessage(JSON.stringify(message));\n }, []);\n\n const handleLoadEnd = useCallback(() => {\n if (initializedRef.current) {\n return;\n }\n\n\n initializedRef.current = true;\n setInitialized(true);\n\n // Add a small delay to ensure WebView is ready to receive messages\n setTimeout(() => {\n post(\"INIT\", { ...options });\n }, 100);\n }, [options, post]);\n\n const onMessage = useCallback(\n (event: WebViewMessageEvent) => {\n let data: BridgeMessage | null = null;\n try {\n data = JSON.parse(event.nativeEvent.data);\n } catch (_) {\n return;\n }\n if (!data) return;\n\n switch (data.type) {\n case \"STATUS_CHANGE\":\n events.onStatusChange?.(data.payload);\n break;\n case \"CONNECT\":\n events.onConnect?.(data.payload);\n break;\n case \"DISCONNECT\":\n events.onDisconnect?.(data.payload);\n break;\n case \"MESSAGE\":\n events.onMessage?.(data.payload);\n break;\n case \"SUGGESTIONS\":\n 
events.onSuggestions?.(data.payload);\n break;\n case \"SPEAKING_START\":\n events.onSpeakingStart?.();\n break;\n case \"SPEAKING_END\":\n events.onSpeakingEnd?.();\n break;\n case \"TIMEOUT_WARNING\":\n events.onTimeoutWarning?.();\n break;\n case \"TIMEOUT\":\n events.onTimeout?.();\n break;\n case \"KEEP_SESSION\":\n events.onKeepSession?.(data.payload);\n break;\n case \"MUTE_STATUS\":\n events.onMuteStatusChange?.(data.payload);\n break;\n case \"MIC_STATUS\":\n events.onMicrophoneStatusChange?.(data.payload);\n break;\n case \"MIC_TRANSCRIPT\":\n events.onMicrophoneSpeechRecognitionResult?.(data.payload);\n break;\n case \"MIC_ERROR\":\n events.onMicrophoneError?.(data.payload);\n break;\n case \"ERROR\":\n events.onError?.(data.payload);\n break;\n default:\n break;\n }\n },\n [events]\n );\n\n return {\n webViewRef,\n webViewProps: {\n source: { html, baseUrl: \"https://unpkg.com\" },\n onMessage,\n onLoadEnd: handleLoadEnd,\n javaScriptEnabled: true,\n mediaPlaybackRequiresUserAction: false,\n allowsInlineMediaPlayback: true,\n allowsFileAccess: true,\n domStorageEnabled: true,\n mediaCapturePermissionGrantType: 'grant',\n },\n initialized,\n startSession: () => post(\"START_SESSION\"),\n endSession: () => post(\"END_SESSION\"),\n sendMessage: (text: string) => post(\"SEND_MESSAGE\", { text }),\n toggleMicrophone: () => post(\"TOGGLE_MIC\"),\n toggleMute: () => post(\"TOGGLE_MUTE\"),\n keepSession: () => post(\"KEEP_SESSION\"),\n };\n}\n\nexport function useConversation(\n options: ConversationOptions,\n events?: ConversationEvents,\n): UseConversationResult {\n return useBridge(options, events,);\n}\n\nexport type UnithConversationViewProps = ConversationEvents & {\n options: ConversationOptions;\n style?: ViewStyle;\n webviewProps?: Record<string, any>;\n webClientUrl?: string;\n};\n\n\nexport type Status = StatusType;\nexport type Mode = ModeType;\nexport type MicrophoneStatus = MicrophoneStatusType;\n\n\nexport function UnithConversationView({\n options,\n style,\n webviewProps,\n webClientUrl,\n ...events\n}: UnithConversationViewProps) {\n const convo = useConversation(options, events);\n\n return (\n <WebView\n ref={convo.webViewRef}\n {...convo.webViewProps}\n {...webviewProps}\n style={style}\n />\n 
);\n}\n"],"names":["_excluded","useConversation","options","events","webViewRef","useRef","initializedRef","_useState","useState","initialized","setInitialized","post","useCallback","type","payload","_webViewRef$current","current","postMessage","JSON","stringify","handleLoadEnd","setTimeout","_extends","onMessage","event","data","parse","nativeEvent","_","onStatusChange","onConnect","onDisconnect","onSuggestions","onSpeakingStart","onSpeakingEnd","onTimeoutWarning","onTimeout","onKeepSession","onMuteStatusChange","onMicrophoneStatusChange","onMicrophoneSpeechRecognitionResult","onMicrophoneError","onError","webViewProps","source","html","baseUrl","onLoadEnd","javaScriptEnabled","mediaPlaybackRequiresUserAction","allowsInlineMediaPlayback","allowsFileAccess","domStorageEnabled","mediaCapturePermissionGrantType","startSession","endSession","sendMessage","text","toggleMicrophone","toggleMute","keepSession","useBridge","_ref","style","webviewProps","convo","_objectWithoutPropertiesLoose","React","WebView","ref"],"mappings":"iXACA,IAAAA,EAAA,CAAA,UAAA,QAAA,eAAA,gBA0TgB,SAAAC,EACdC,EACAC,GAEA,OAxHF,SACED,EACAC,YAAAA,IAAAA,EAA6B,CAAA,GAE7B,IAAMC,EAAaC,EAAMA,OAAM,MACzBC,EAAiBD,UAAO,GAC9BE,EAAsCC,EAAAA,UAAS,GAAxCC,EAAWF,KAAEG,EAAcH,EAElC,GAEMI,EAAOC,cAAY,SAACC,EAAcC,GAAiB,IAAAC,EAErC,OAAlBA,EAAAX,EAAWY,UAAXD,EAAoBE,YAAYC,KAAKC,UADN,CAAEN,KAAAA,EAAMC,QAAAA,IAEzC,EAAG,IAEGM,EAAgBR,EAAWA,YAAC,WAC5BN,EAAeU,UAKnBV,EAAeU,SAAU,EACzBN,GAAe,GAGfW,WAAW,WACTV,EAAK,OAAMW,EAAA,CAAA,EAAOpB,GACpB,EAAG,KACL,EAAG,CAACA,EAASS,IAEPY,EAAYX,EAAAA,YAChB,SAACY,GACC,IAAIC,EAA6B,KACjC,IACEA,EAAOP,KAAKQ,MAAMF,EAAMG,YAAYF,KACtC,CAAE,MAAOG,GACP,MACF,CACA,GAAKH,EAEL,OAAQA,EAAKZ,MACX,IAAK,sBACHV,EAAO0B,gBAAP1B,EAAO0B,eAAiBJ,EAAKX,SAC7B,MACF,IAAK,gBACHX,EAAO2B,WAAP3B,EAAO2B,UAAYL,EAAKX,SACxB,MACF,IAAK,mBACHX,EAAO4B,cAAP5B,EAAO4B,aAAeN,EAAKX,SAC3B,MACF,IAAK,gBACHX,EAAOoB,WAAPpB,EAAOoB,UAAYE,EAAKX,SACxB,MACF,IAAK,cACiB,MAApBX,EAAO6B,eAAP7B,EAAO6B,cAAgBP,EAAKX,SAC5B,MACF,IAAK,iBACmB,MAAtBX,EAAO8B,iBAAP9B,EAAO8B,kBACP,MACF,IAAK,eACH9B,MAAAA,EAAO+B,eAAP/B,EAAO+B,gBACP,MACF,IAAK,wBACH/B,EAAOgC,kBAAPhC,EAAOgC,mBACP,MACF,IAAK,gBACHhC,EAAOiC,WAAPjC,EAAOiC,YACP,MACF,IAAK,eACiB,MAApBjC,EAAOkC,eAAPlC,EAAOkC,cAAgBZ,EAAKX,SAC5B,MACF,IAAK,cACsB,MAAzBX,EAAOmC,oBAAPnC,EAAOmC,mBAAqBb,EAAKX,SACjC,MACF,IAAK,aAC4B,MAA/BX,EAAOoC,0BAAPpC,EAAOoC,yBAA2Bd,EAAKX,SACvC,MACF,IAAK,iBACuC,MAA1CX,EAAOqC,qCAAPrC,EAAOqC,oCAAsCf,EAAKX,SAClD,MACF,IAAK,YACHX,MAAAA,EAAOsC,mBAAPtC,EAAOsC,kBAAoBhB,EAAKX,SAChC,MACF,IAAK,QACHX,MAAAA,EAAOuC,SAAPvC,EAAOuC,QAAUjB,EAAKX,SAK5B,EACA,CAACX,IAGH,MAAO,CACLC,WAAAA,EACAuC,aAAc,CACZC,OAAQ,CAAEC,ouJAAMC,QAAS,qBACzBvB,UAAAA,EACAwB,UAAW3B,EACX4B,mBAAmB,EACnBC,iCAAiC,EACjCC,2BAA2B,EAC3BC,kBAAkB,EAClBC,mBAAmB,EACnBC,gCAAiC,SAEnC5C,YAAAA,EACA6C,aAAc,WAAF,OAAQ3C,EAAK,gBAAgB,EACzC4C,WAAY,kBAAM5C,EAAK,cAAc,EACrC6C,YAAa,SAACC,UAAiB9C,EAAK,eAAgB,CAAE8C,KAAAA,GAAO,EAC7DC,iBAAkB,WAAF,OAAQ/C,EAAK,aAAa,EAC1CgD,WAAY,kBAAMhD,EAAK,cAAc,EACrCiD,YAAa,WAAM,OAAAjD,EAAK,eAAe,EAE3C,CAMSkD,CAAU3D,EAASC,EAC5B,+BAegB,SAAqB2D,GACnC,IACAC,EAAKD,EAALC,MACAC,EAAYF,EAAZE,aAIMC,EAAQhE,EANP6D,EAAP5D,mJAISgE,CAAAJ,EAAA9D,iBAIT,OACEmE,wBAACC,EAAOA,QAAA9C,EAAA,CACN+C,IAAKJ,EAAM7D,YACP6D,EAAMtB,aACNqB,EAAY,CAChBD,MAAOA,IAGb"}
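The sourcesContent embedded in this map spells out the whole native-to-web bridge: both directions exchange a single JSON envelope, and the page queues any command that arrives before initialization completes. A hedged TypeScript summary of the message types visible in that source; the RnToWeb and WebToRn names are my labels, not identifiers from the package.

// Envelope used in both directions (BridgeMessage in the embedded source):
// the hook calls webViewRef.current?.postMessage(JSON.stringify({ type, payload }))
// and the page replies via window.ReactNativeWebView.postMessage(...).
type BridgeMessage = { type: string; payload?: any };

// Commands the hook posts into the WebView (the RnToWeb label is mine):
type RnToWeb =
  | "INIT"          // sent once from onLoadEnd, after a 100 ms delay, with the options
  | "START_SESSION"
  | "END_SESSION"
  | "SEND_MESSAGE"  // payload: { text: string }
  | "TOGGLE_MIC"
  | "TOGGLE_MUTE"
  | "KEEP_SESSION";

// Events the page sends back, each dispatched to one ConversationEvents
// handler in the hook's onMessage switch (the WebToRn label is mine):
type WebToRn =
  | "READY" // emitted once Conversation.startDigitalHuman resolves
  | "STATUS_CHANGE" | "CONNECT" | "DISCONNECT" | "MESSAGE" | "SUGGESTIONS"
  | "SPEAKING_START" | "SPEAKING_END" | "TIMEOUT_WARNING" | "TIMEOUT"
  | "KEEP_SESSION" | "MUTE_STATUS"
  | "MIC_STATUS" | "MIC_TRANSCRIPT" | "MIC_ERROR" | "ERROR";

// Commands other than INIT that arrive before READY are pushed onto a queue
// and replayed afterwards, so an early sendMessage("hi") is not lost:
const queuedExample: BridgeMessage = { type: "SEND_MESSAGE", payload: { text: "hi" } };

Also visible in this source: 0.0.12 builds the page HTML from the hard-coded DEFAULT_WEB_CLIENT_URL, so the webClientUrl prop still present on UnithConversationViewProps is no longer forwarded anywhere, and allowsFileAccess/domStorageEnabled, commented out in 0.0.10, are now enabled on the WebView.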
package/dist/lib.modern.mjs
CHANGED
@@ -1,2 +1,2 @@
-
import n,{useRef as e,useState as a,
+
import n,{useRef as e,useState as a,useCallback as t}from"react";import{WebView as o}from"react-native-webview";function s(){return s=Object.assign?Object.assign.bind():function(n){for(var e=1;e<arguments.length;e++){var a=arguments[e];for(var t in a)({}).hasOwnProperty.call(a,t)&&(n[t]=a[t])}return n},s.apply(null,arguments)}const r=["options","style","webviewProps","webClientUrl"];function i(n,o){return function(n,o={}){const r=e(null),i=e(!1),[d,c]=a(!1),l=t((n,e)=>{var a;null==(a=r.current)||a.postMessage(JSON.stringify({type:n,payload:e}))},[]),S=t(()=>{i.current||(i.current=!0,c(!0),setTimeout(()=>{l("INIT",s({},n))},100))},[n,l]),p=t(n=>{let e=null;try{e=JSON.parse(n.nativeEvent.data)}catch(n){return}if(e)switch(e.type){case"STATUS_CHANGE":null==o.onStatusChange||o.onStatusChange(e.payload);break;case"CONNECT":null==o.onConnect||o.onConnect(e.payload);break;case"DISCONNECT":null==o.onDisconnect||o.onDisconnect(e.payload);break;case"MESSAGE":null==o.onMessage||o.onMessage(e.payload);break;case"SUGGESTIONS":null==o.onSuggestions||o.onSuggestions(e.payload);break;case"SPEAKING_START":null==o.onSpeakingStart||o.onSpeakingStart();break;case"SPEAKING_END":null==o.onSpeakingEnd||o.onSpeakingEnd();break;case"TIMEOUT_WARNING":null==o.onTimeoutWarning||o.onTimeoutWarning();break;case"TIMEOUT":null==o.onTimeout||o.onTimeout();break;case"KEEP_SESSION":null==o.onKeepSession||o.onKeepSession(e.payload);break;case"MUTE_STATUS":null==o.onMuteStatusChange||o.onMuteStatusChange(e.payload);break;case"MIC_STATUS":null==o.onMicrophoneStatusChange||o.onMicrophoneStatusChange(e.payload);break;case"MIC_TRANSCRIPT":null==o.onMicrophoneSpeechRecognitionResult||o.onMicrophoneSpeechRecognitionResult(e.payload);break;case"MIC_ERROR":null==o.onMicrophoneError||o.onMicrophoneError(e.payload);break;case"ERROR":null==o.onError||o.onError(e.payload)}},[o]);return{webViewRef:r,webViewProps:{source:{html:'<!doctype html>\n<html>\n <head>\n <meta charset="utf-8" />\n <meta name="viewport" content="width=device-width, initial-scale=1" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id="root"></div>\n <script type="module">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => { \n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === "INIT") {\n try {\n const { Conversation } = await import("https://unpkg.com/@unith-ai/core-client@2.0.3-beta.2/dist/lib.web.js");\n\n if (!("VideoDecoder" in window)) {\n send("ERROR", {\n message: "WebCodecs VideoDecoder is not supported on this device.",\n type: "modal",\n endConversation: true,\n });\n return;\n }\n\n const element = document.getElementById("root");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send("STATUS_CHANGE", data),\n onConnect: (data) => send("CONNECT", data),\n onDisconnect: (data) => send("DISCONNECT", data),\n onMessage: (data) => send("MESSAGE", data),\n onSuggestions: (data) => send("SUGGESTIONS", data),\n onSpeakingStart: () => send("SPEAKING_START"),\n onSpeakingEnd: () => send("SPEAKING_END"),\n onTimeoutWarning: () => send("TIMEOUT_WARNING"),\n onTimeout: () => send("TIMEOUT"),\n onKeepSession: (data) 
=> send("KEEP_SESSION", data),\n onMuteStatusChange: (data) => send("MUTE_STATUS", data),\n onError: (data) => send("ERROR", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send("MIC_ERROR", data),\n onMicrophoneStatusChange: (data) => send("MIC_STATUS", data),\n onMicrophoneSpeechRecognitionResult: (data) => send("MIC_TRANSCRIPT", data),\n },\n });\n\n ready = true;\n send("READY");\n\n while (queue.length > 0) {\n const next = queue.shift();\n try {\n await applyMessage(next);\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Failed to process queued message",\n type: "notification",\n endConversation: false,\n });\n }\n }\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Failed to initialize conversation",\n type: "modal",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n try {\n switch (type) {\n case "START_SESSION":\n await conversation.startSession();\n break;\n case "END_SESSION":\n await conversation.endSession();\n break;\n case "SEND_MESSAGE":\n await conversation.sendMessage(payload?.text || "");\n break;\n case "TOGGLE_MIC":\n await conversation.toggleMicrophone();\n break;\n case "TOGGLE_MUTE":\n await conversation.toggleMute();\n break;\n case "KEEP_SESSION":\n await conversation.keepSession();\n break;\n default:\n break;\n }\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Operation failed",\n type: "notification",\n endConversation: false,\n });\n }\n };\n\n const handleIncoming = (event) => { \n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== "INIT") {\n queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (error) {\n console.error("Failed to handle incoming message:", error);\n }\n };\n\n window.addEventListener("message", handleIncoming);\n document.addEventListener("message", handleIncoming);\n <\/script>\n </body>\n</html>',baseUrl:"https://unpkg.com"},onMessage:p,onLoadEnd:S,javaScriptEnabled:!0,mediaPlaybackRequiresUserAction:!1,allowsInlineMediaPlayback:!0,allowsFileAccess:!0,domStorageEnabled:!0,mediaCapturePermissionGrantType:"grant"},initialized:d,startSession:()=>l("START_SESSION"),endSession:()=>l("END_SESSION"),sendMessage:n=>l("SEND_MESSAGE",{text:n}),toggleMicrophone:()=>l("TOGGLE_MIC"),toggleMute:()=>l("TOGGLE_MUTE"),keepSession:()=>l("KEEP_SESSION")}}(n,o)}function d(e){let{options:a,style:t,webviewProps:d}=e;const c=i(a,function(n,e){if(null==n)return{};var a={};for(var t in n)if({}.hasOwnProperty.call(n,t)){if(-1!==e.indexOf(t))continue;a[t]=n[t]}return a}(e,r));/*#__PURE__*/return n.createElement(o,s({ref:c.webViewRef},c.webViewProps,d,{style:t}))}export{d as UnithConversationView,i as useConversation};
//# sourceMappingURL=lib.modern.mjs.map
package/dist/lib.modern.mjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"lib.modern.mjs","sources":["../src/index.tsx"],"sourcesContent":["import React, { useCallback, useMemo, useRef, useState } from \"react\";\nimport { ViewStyle } from \"react-native\";\nimport { WebView, WebViewMessageEvent } from \"react-native-webview\";\n\nexport type ConversationOptions = {\n orgId: string;\n headId: string;\n apiKey: string;\n mode?: string;\n language?: string;\n username?: string;\n allowWakeLock?: boolean;\n fadeTransitionsType?: string;\n microphoneProvider?: \"azure\" | \"eleven_labs\"\n};\n\nexport type ConversationEvents = {\n onStatusChange?: (prop: { status: string }) => void;\n onConnect?: (prop: { userId: string; headInfo: any; microphoneAccess: boolean }) => void;\n onDisconnect?: (prop: any) => void;\n onMessage?: (prop: any) => void;\n onSuggestions?: (prop: { suggestions: string[] }) => void;\n onTimeoutWarning?: () => void;\n onTimeout?: () => void;\n onMuteStatusChange?: (prop: { isMuted: boolean }) => void;\n onSpeakingStart?: () => void;\n onSpeakingEnd?: () => void;\n onKeepSession?: (prop: { granted: boolean }) => void;\n onError?: (prop: { message: string; endConversation: boolean; type: string }) => void;\n onMicrophoneError?: (prop: { message: string }) => void;\n onMicrophoneStatusChange?: (prop: { status: \"ON\" | \"OFF\" | \"PROCESSING\" }) => void;\n onMicrophoneSpeechRecognitionResult?: (prop: { transcript: string }) => void;\n};\n\nexport type BridgeOptions = {\n webClientUrl?: string;\n webViewBaseUrl?: string;\n};\n\nexport type UseConversationResult = {\n webViewRef: React.RefObject<any>;\n webViewProps: Record<string, any>;\n initialized: boolean;\n startSession: () => void;\n endSession: () => void;\n sendMessage: (text: string) => void;\n toggleMicrophone: () => void;\n toggleMute: () => void;\n keepSession: () => void;\n};\n\ntype BridgeMessage = {\n type: string;\n payload?: any;\n};\n\nconst DEFAULT_WEB_CLIENT_URL =\n \"https://unpkg.com/@unith-ai/core-client@2.0.3-beta.2/dist/lib.web.js\";\n\nfunction buildHtml(webClientUrl: string) {\n return `<!doctype html>\n<html>\n <head>\n <meta charset=\"utf-8\" />\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id=\"root\"></div>\n <script type=\"module\">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n \n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => { \n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === \"INIT\") {\n try {\n const { Conversation } = await import(\"${webClientUrl}\");\n\n if (!(\"VideoDecoder\" in window)) {\n send(\"ERROR\", {\n message: \"WebCodecs VideoDecoder is not supported on this device.\",\n type: \"modal\",\n endConversation: true,\n });\n return;\n }\n\n const element = document.getElementById(\"root\");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send(\"STATUS_CHANGE\", data),\n onConnect: (data) => send(\"CONNECT\", data),\n onDisconnect: (data) => send(\"DISCONNECT\", data),\n onMessage: (data) => send(\"MESSAGE\", data),\n onSuggestions: (data) => send(\"SUGGESTIONS\", data),\n onSpeakingStart: () => send(\"SPEAKING_START\"),\n onSpeakingEnd: () 
=> send(\"SPEAKING_END\"),\n onTimeoutWarning: () => send(\"TIMEOUT_WARNING\"),\n onTimeout: () => send(\"TIMEOUT\"),\n onKeepSession: (data) => send(\"KEEP_SESSION\", data),\n onMuteStatusChange: (data) => send(\"MUTE_STATUS\", data),\n onError: (data) => send(\"ERROR\", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send(\"MIC_ERROR\", data),\n onMicrophoneStatusChange: (data) => send(\"MIC_STATUS\", data),\n onMicrophoneSpeechRecognitionResult: (data) => send(\"MIC_TRANSCRIPT\", data),\n },\n });\n\n ready = true;\n send(\"READY\");\n\n while (queue.length > 0) {\n const next = queue.shift();\n try {\n await applyMessage(next);\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to process queued message\",\n type: \"notification\",\n endConversation: false,\n });\n }\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to initialize conversation\",\n type: \"modal\",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n try {\n switch (type) {\n case \"START_SESSION\":\n await conversation.startSession();\n break;\n case \"END_SESSION\":\n await conversation.endSession();\n break;\n case \"SEND_MESSAGE\":\n await conversation.sendMessage(payload?.text || \"\");\n break;\n case \"TOGGLE_MIC\":\n await conversation.toggleMicrophone();\n break;\n case \"TOGGLE_MUTE\":\n await conversation.toggleMute();\n break;\n case \"KEEP_SESSION\":\n await conversation.keepSession();\n break;\n default:\n break;\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Operation failed\",\n type: \"notification\",\n endConversation: false,\n });\n }\n };\n\n const handleIncoming = (event) => { \n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== \"INIT\") {\n queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (error) {\n console.error(\"Failed to handle incoming message:\", error);\n }\n };\n\n window.addEventListener(\"message\", handleIncoming);\n document.addEventListener(\"message\", handleIncoming);\n </script>\n </body>\n</html>`;\n}\n\nfunction useBridge(\n options: ConversationOptions,\n events: ConversationEvents = {},\n bridge: BridgeOptions = {}\n): UseConversationResult {\n const webViewRef = useRef<any>(null);\n const initializedRef = useRef(false);\n const [initialized, setInitialized] = useState(false);\n\n const html = useMemo(\n () => buildHtml(bridge.webClientUrl || DEFAULT_WEB_CLIENT_URL),\n [bridge.webClientUrl]\n );\n\n const post = useCallback((type: string, payload?: any) => {\n const message: BridgeMessage = { type, payload };\n webViewRef.current?.postMessage(JSON.stringify(message));\n }, []);\n\n const handleLoadEnd = useCallback(() => {\n if (initializedRef.current) {\n return;\n }\n\n initializedRef.current = true;\n setInitialized(true);\n\n // Add a small delay to ensure WebView is ready to receive messages\n setTimeout(() => {\n post(\"INIT\", { ...options });\n }, 100);\n }, [options, post]);\n\n const onMessage = useCallback(\n (event: WebViewMessageEvent) => {\n let data: BridgeMessage | null = null;\n try {\n data = JSON.parse(event.nativeEvent.data);\n } catch (_) {\n return;\n }\n if (!data) return;\n\n switch (data.type) {\n case \"STATUS_CHANGE\":\n events.onStatusChange?.(data.payload);\n break;\n case \"CONNECT\":\n events.onConnect?.(data.payload);\n break;\n case \"DISCONNECT\":\n events.onDisconnect?.(data.payload);\n break;\n case \"MESSAGE\":\n events.onMessage?.(data.payload);\n break;\n case 
\"SUGGESTIONS\":\n events.onSuggestions?.(data.payload);\n break;\n case \"SPEAKING_START\":\n events.onSpeakingStart?.();\n break;\n case \"SPEAKING_END\":\n events.onSpeakingEnd?.();\n break;\n case \"TIMEOUT_WARNING\":\n events.onTimeoutWarning?.();\n break;\n case \"TIMEOUT\":\n events.onTimeout?.();\n break;\n case \"KEEP_SESSION\":\n events.onKeepSession?.(data.payload);\n break;\n case \"MUTE_STATUS\":\n events.onMuteStatusChange?.(data.payload);\n break;\n case \"MIC_STATUS\":\n events.onMicrophoneStatusChange?.(data.payload);\n break;\n case \"MIC_TRANSCRIPT\":\n events.onMicrophoneSpeechRecognitionResult?.(data.payload);\n break;\n case \"MIC_ERROR\":\n events.onMicrophoneError?.(data.payload);\n break;\n // case \"WEB_LOG\":\n // if (typeof console !== \"undefined\") {\n // const level = data.payload?.level || \"log\";\n // const args = Array.isArray(data.payload?.args)\n // ? data.payload.args\n // : [data.payload?.args];\n // const fn = (console as any)[level] || console.log;\n // fn(...args);\n // }\n // break;\n case \"ERROR\":\n events.onError?.(data.payload);\n break;\n default:\n break;\n }\n },\n [events]\n );\n\n return {\n webViewRef,\n webViewProps: {\n source: { html, baseUrl: bridge.webViewBaseUrl || \"https://unpkg.com\" },\n onMessage,\n onLoadEnd: handleLoadEnd,\n javaScriptEnabled: true,\n mediaPlaybackRequiresUserAction: false,\n allowsInlineMediaPlayback: true,\n // allowsFileAccess: true,\n // domStorageEnabled: true,\n mediaCapturePermissionGrantType: 'grant',\n },\n initialized,\n startSession: () => post(\"START_SESSION\"),\n endSession: () => post(\"END_SESSION\"),\n sendMessage: (text: string) => post(\"SEND_MESSAGE\", { text }),\n toggleMicrophone: () => post(\"TOGGLE_MIC\"),\n toggleMute: () => post(\"TOGGLE_MUTE\"),\n keepSession: () => post(\"KEEP_SESSION\"),\n };\n}\n\nexport function useConversation(\n options: ConversationOptions,\n events?: ConversationEvents,\n bridge?: BridgeOptions\n): UseConversationResult {\n return useBridge(options, events, bridge);\n}\n\n\nexport type UnithConversationViewProps = ConversationEvents & {\n options: ConversationOptions;\n style?: ViewStyle;\n webviewProps?: Record<string, any>;\n webClientUrl?: string;\n};\n\nexport function UnithConversationView({\n options,\n style,\n webviewProps,\n webClientUrl,\n ...events\n}: UnithConversationViewProps) {\n const convo = useConversation(options, events, { webClientUrl });\n\n return (\n <WebView\n ref={convo.webViewRef}\n {...convo.webViewProps}\n {...webviewProps}\n style={style}\n />\n 
);\n}\n"],"names":["_excluded","useConversation","options","events","bridge","webViewRef","useRef","initializedRef","initialized","setInitialized","useState","html","useMemo","webClientUrl","post","useCallback","type","payload","_webViewRef$current","current","postMessage","JSON","stringify","handleLoadEnd","setTimeout","_extends","onMessage","event","data","parse","nativeEvent","_","onStatusChange","onConnect","onDisconnect","onSuggestions","onSpeakingStart","onSpeakingEnd","onTimeoutWarning","onTimeout","onKeepSession","onMuteStatusChange","onMicrophoneStatusChange","onMicrophoneSpeechRecognitionResult","onMicrophoneError","onError","webViewProps","source","baseUrl","webViewBaseUrl","onLoadEnd","javaScriptEnabled","mediaPlaybackRequiresUserAction","allowsInlineMediaPlayback","mediaCapturePermissionGrantType","startSession","endSession","sendMessage","text","toggleMicrophone","toggleMute","keepSession","useBridge","UnithConversationView","_ref","style","webviewProps","convo","_objectWithoutPropertiesLoose","React","WebView","ref"],"mappings":"qVAAA,MAAAA,EAAA,CAAA,UAAA,QAAA,eAAA,gBA2UgB,SAAAC,EACdC,EACAC,EACAC,GAEA,OAtIF,SACEF,EACAC,EAA6B,CAAA,EAC7BC,EAAwB,IAExB,MAAMC,EAAaC,EAAY,MACzBC,EAAiBD,GAAO,IACvBE,EAAaC,GAAkBC,GAAS,GAEzCC,EAAOC,EACX,IAxJK,u5BAwJWR,EAAOS,cA3JzB,g1HA4JE,CAACT,EAAOS,eAGJC,EAAOC,EAAY,CAACC,EAAcC,KAAiB,IAAAC,EAErC,OAAlBA,EAAAb,EAAWc,UAAXD,EAAoBE,YAAYC,KAAKC,UADN,CAAEN,OAAMC,cAEtC,IAEGM,EAAgBR,EAAY,KAC5BR,EAAeY,UAInBZ,EAAeY,SAAU,EACzBV,GAAe,GAGfe,WAAW,KACTV,EAAK,OAAMW,EAAOvB,CAAAA,EAAAA,KACjB,OACF,CAACA,EAASY,IAEPY,EAAYX,EACfY,IACC,IAAIC,EAA6B,KACjC,IACEA,EAAOP,KAAKQ,MAAMF,EAAMG,YAAYF,KACtC,CAAE,MAAOG,GACP,MACF,CACA,GAAKH,EAEL,OAAQA,EAAKZ,MACX,IAAK,gBACHb,MAAAA,EAAO6B,gBAAP7B,EAAO6B,eAAiBJ,EAAKX,SAC7B,MACF,IAAK,gBACHd,EAAO8B,WAAP9B,EAAO8B,UAAYL,EAAKX,SACxB,MACF,IAAK,aACHd,MAAAA,EAAO+B,cAAP/B,EAAO+B,aAAeN,EAAKX,SAC3B,MACF,IAAK,UACa,MAAhBd,EAAOuB,WAAPvB,EAAOuB,UAAYE,EAAKX,SACxB,MACF,IAAK,cACHd,MAAAA,EAAOgC,eAAPhC,EAAOgC,cAAgBP,EAAKX,SAC5B,MACF,IAAK,iBACmB,MAAtBd,EAAOiC,iBAAPjC,EAAOiC,kBACP,MACF,IAAK,eACHjC,MAAAA,EAAOkC,eAAPlC,EAAOkC,gBACP,MACF,IAAK,kBACHlC,MAAAA,EAAOmC,kBAAPnC,EAAOmC,mBACP,MACF,IAAK,gBACHnC,EAAOoC,WAAPpC,EAAOoC,YACP,MACF,IAAK,eACiB,MAApBpC,EAAOqC,eAAPrC,EAAOqC,cAAgBZ,EAAKX,SAC5B,MACF,IAAK,oBACHd,EAAOsC,oBAAPtC,EAAOsC,mBAAqBb,EAAKX,SACjC,MACF,IAAK,aACHd,MAAAA,EAAOuC,0BAAPvC,EAAOuC,yBAA2Bd,EAAKX,SACvC,MACF,IAAK,iBACuC,MAA1Cd,EAAOwC,qCAAPxC,EAAOwC,oCAAsCf,EAAKX,SAClD,MACF,IAAK,YACHd,MAAAA,EAAOyC,mBAAPzC,EAAOyC,kBAAoBhB,EAAKX,SAChC,MAWF,IAAK,cACHd,EAAO0C,SAAP1C,EAAO0C,QAAUjB,EAAKX,WAM5B,CAACd,IAGH,MAAO,CACLE,aACAyC,aAAc,CACZC,OAAQ,CAAEpC,OAAMqC,QAAS5C,EAAO6C,gBAAkB,qBAClDvB,YACAwB,UAAW3B,EACX4B,mBAAmB,EACnBC,iCAAiC,EACjCC,2BAA2B,EAG3BC,gCAAiC,SAEnC9C,cACA+C,aAAcA,IAAMzC,EAAK,iBACzB0C,WAAYA,IAAM1C,EAAK,eACvB2C,YAAcC,GAAiB5C,EAAK,eAAgB,CAAE4C,SACtDC,iBAAkBA,IAAM7C,EAAK,cAC7B8C,WAAYA,IAAM9C,EAAK,eACvB+C,YAAaA,IAAM/C,EAAK,gBAE5B,CAOSgD,CAAU5D,EAASC,EAAQC,EACpC,UAUgB2D,EAAqBC,GAAC,IAAA9D,QACpCA,EAAO+D,MACPA,EAAKC,aACLA,EAAYrD,aACZA,GAE2BmD,EAC3B,MAAMG,EAAQlE,EAAgBC,6IAFrBkE,CAAAJ,EAAAhE,GAEsC,CAAEa,8BAEjD,OACEwD,gBAACC,EAAO7C,GACN8C,IAAKJ,EAAM9D,YACP8D,EAAMrB,aACNoB,EAAY,CAChBD,MAAOA,IAGb"}
|
|
1
|
+
{"version":3,"file":"lib.modern.mjs","sources":["../src/index.tsx"],"sourcesContent":["import { Status as StatusType, Mode as ModeType, MicrophoneStatus as MicrophoneStatusType } from \"@unith-ai/core-client\";\nimport React, { useCallback, useRef, useState } from \"react\";\nimport { ViewStyle } from \"react-native\";\nimport { WebView, WebViewMessageEvent } from \"react-native-webview\";\n\nexport type ConversationOptions = {\n orgId: string;\n headId: string;\n apiKey: string;\n language?: string;\n username?: string;\n microphoneProvider?: \"azure\" | \"eleven_labs\" | \"custom\";\n};\n\nexport type ConversationEvents = {\n onStatusChange?: (prop: { status: string }) => void;\n onConnect?: (prop: { userId: string; headInfo: any; microphoneAccess: boolean }) => void;\n onDisconnect?: (prop: any) => void;\n onMessage?: (prop: any) => void;\n onSuggestions?: (prop: { suggestions: string[] }) => void;\n onTimeoutWarning?: () => void;\n onTimeout?: () => void;\n onMuteStatusChange?: (prop: { isMuted: boolean }) => void;\n onSpeakingStart?: () => void;\n onSpeakingEnd?: () => void;\n onKeepSession?: (prop: { granted: boolean }) => void;\n onError?: (prop: { message: string; endConversation: boolean; type: string }) => void;\n onMicrophoneError?: (prop: { message: string }) => void;\n onMicrophoneStatusChange?: (prop: { status: \"ON\" | \"OFF\" | \"PROCESSING\" }) => void;\n onMicrophoneSpeechRecognitionResult?: (prop: { transcript: string }) => void;\n};\n\nexport type BridgeOptions = {\n webClientUrl?: string;\n webViewBaseUrl?: string;\n};\n\nexport type UseConversationResult = {\n webViewRef: React.RefObject<any>;\n webViewProps: Record<string, any>;\n initialized: boolean;\n startSession: () => void;\n endSession: () => void;\n sendMessage: (text: string) => void;\n toggleMicrophone: () => void;\n toggleMute: () => void;\n keepSession: () => void;\n};\n\ntype BridgeMessage = {\n type: string;\n payload?: any;\n};\n\nconst DEFAULT_WEB_CLIENT_URL =\n \"https://unpkg.com/@unith-ai/core-client@2.0.3-beta.2/dist/lib.web.js\";\n\nfunction buildHtml(webClientUrl: string) {\n return `<!doctype html>\n<html>\n <head>\n <meta charset=\"utf-8\" />\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id=\"root\"></div>\n <script type=\"module\">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => { \n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === \"INIT\") {\n try {\n const { Conversation } = await import(\"${webClientUrl}\");\n\n if (!(\"VideoDecoder\" in window)) {\n send(\"ERROR\", {\n message: \"WebCodecs VideoDecoder is not supported on this device.\",\n type: \"modal\",\n endConversation: true,\n });\n return;\n }\n\n const element = document.getElementById(\"root\");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send(\"STATUS_CHANGE\", data),\n onConnect: (data) => send(\"CONNECT\", data),\n onDisconnect: (data) => send(\"DISCONNECT\", data),\n onMessage: (data) => send(\"MESSAGE\", data),\n onSuggestions: (data) => send(\"SUGGESTIONS\", data),\n onSpeakingStart: 
() => send(\"SPEAKING_START\"),\n onSpeakingEnd: () => send(\"SPEAKING_END\"),\n onTimeoutWarning: () => send(\"TIMEOUT_WARNING\"),\n onTimeout: () => send(\"TIMEOUT\"),\n onKeepSession: (data) => send(\"KEEP_SESSION\", data),\n onMuteStatusChange: (data) => send(\"MUTE_STATUS\", data),\n onError: (data) => send(\"ERROR\", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send(\"MIC_ERROR\", data),\n onMicrophoneStatusChange: (data) => send(\"MIC_STATUS\", data),\n onMicrophoneSpeechRecognitionResult: (data) => send(\"MIC_TRANSCRIPT\", data),\n },\n });\n\n ready = true;\n send(\"READY\");\n\n while (queue.length > 0) {\n const next = queue.shift();\n try {\n await applyMessage(next);\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to process queued message\",\n type: \"notification\",\n endConversation: false,\n });\n }\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to initialize conversation\",\n type: \"modal\",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n try {\n switch (type) {\n case \"START_SESSION\":\n await conversation.startSession();\n break;\n case \"END_SESSION\":\n await conversation.endSession();\n break;\n case \"SEND_MESSAGE\":\n await conversation.sendMessage(payload?.text || \"\");\n break;\n case \"TOGGLE_MIC\":\n await conversation.toggleMicrophone();\n break;\n case \"TOGGLE_MUTE\":\n await conversation.toggleMute();\n break;\n case \"KEEP_SESSION\":\n await conversation.keepSession();\n break;\n default:\n break;\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Operation failed\",\n type: \"notification\",\n endConversation: false,\n });\n }\n };\n\n const handleIncoming = (event) => { \n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== \"INIT\") {\n queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (error) {\n console.error(\"Failed to handle incoming message:\", error);\n }\n };\n\n window.addEventListener(\"message\", handleIncoming);\n document.addEventListener(\"message\", handleIncoming);\n </script>\n </body>\n</html>`;\n}\n\nfunction useBridge(\n options: ConversationOptions,\n events: ConversationEvents = {},\n): UseConversationResult {\n const webViewRef = useRef<any>(null);\n const initializedRef = useRef(false);\n const [initialized, setInitialized] = useState(false);\n\n const html = buildHtml(DEFAULT_WEB_CLIENT_URL)\n\n const post = useCallback((type: string, payload?: any) => {\n const message: BridgeMessage = { type, payload };\n webViewRef.current?.postMessage(JSON.stringify(message));\n }, []);\n\n const handleLoadEnd = useCallback(() => {\n if (initializedRef.current) {\n return;\n }\n\n\n initializedRef.current = true;\n setInitialized(true);\n\n // Add a small delay to ensure WebView is ready to receive messages\n setTimeout(() => {\n post(\"INIT\", { ...options });\n }, 100);\n }, [options, post]);\n\n const onMessage = useCallback(\n (event: WebViewMessageEvent) => {\n let data: BridgeMessage | null = null;\n try {\n data = JSON.parse(event.nativeEvent.data);\n } catch (_) {\n return;\n }\n if (!data) return;\n\n switch (data.type) {\n case \"STATUS_CHANGE\":\n events.onStatusChange?.(data.payload);\n break;\n case \"CONNECT\":\n events.onConnect?.(data.payload);\n break;\n case \"DISCONNECT\":\n events.onDisconnect?.(data.payload);\n break;\n case \"MESSAGE\":\n events.onMessage?.(data.payload);\n break;\n case \"SUGGESTIONS\":\n 
events.onSuggestions?.(data.payload);\n break;\n case \"SPEAKING_START\":\n events.onSpeakingStart?.();\n break;\n case \"SPEAKING_END\":\n events.onSpeakingEnd?.();\n break;\n case \"TIMEOUT_WARNING\":\n events.onTimeoutWarning?.();\n break;\n case \"TIMEOUT\":\n events.onTimeout?.();\n break;\n case \"KEEP_SESSION\":\n events.onKeepSession?.(data.payload);\n break;\n case \"MUTE_STATUS\":\n events.onMuteStatusChange?.(data.payload);\n break;\n case \"MIC_STATUS\":\n events.onMicrophoneStatusChange?.(data.payload);\n break;\n case \"MIC_TRANSCRIPT\":\n events.onMicrophoneSpeechRecognitionResult?.(data.payload);\n break;\n case \"MIC_ERROR\":\n events.onMicrophoneError?.(data.payload);\n break;\n case \"ERROR\":\n events.onError?.(data.payload);\n break;\n default:\n break;\n }\n },\n [events]\n );\n\n return {\n webViewRef,\n webViewProps: {\n source: { html, baseUrl: \"https://unpkg.com\" },\n onMessage,\n onLoadEnd: handleLoadEnd,\n javaScriptEnabled: true,\n mediaPlaybackRequiresUserAction: false,\n allowsInlineMediaPlayback: true,\n allowsFileAccess: true,\n domStorageEnabled: true,\n mediaCapturePermissionGrantType: 'grant',\n },\n initialized,\n startSession: () => post(\"START_SESSION\"),\n endSession: () => post(\"END_SESSION\"),\n sendMessage: (text: string) => post(\"SEND_MESSAGE\", { text }),\n toggleMicrophone: () => post(\"TOGGLE_MIC\"),\n toggleMute: () => post(\"TOGGLE_MUTE\"),\n keepSession: () => post(\"KEEP_SESSION\"),\n };\n}\n\nexport function useConversation(\n options: ConversationOptions,\n events?: ConversationEvents,\n): UseConversationResult {\n return useBridge(options, events,);\n}\n\nexport type UnithConversationViewProps = ConversationEvents & {\n options: ConversationOptions;\n style?: ViewStyle;\n webviewProps?: Record<string, any>;\n webClientUrl?: string;\n};\n\n\nexport type Status = StatusType;\nexport type Mode = ModeType;\nexport type MicrophoneStatus = MicrophoneStatusType;\n\n\nexport function UnithConversationView({\n options,\n style,\n webviewProps,\n webClientUrl,\n ...events\n}: UnithConversationViewProps) {\n const convo = useConversation(options, events);\n\n return (\n <WebView\n ref={convo.webViewRef}\n {...convo.webViewProps}\n {...webviewProps}\n style={style}\n />\n 
);\n}\n"],"names":["_excluded","useConversation","options","events","webViewRef","useRef","initializedRef","initialized","setInitialized","useState","post","useCallback","type","payload","_webViewRef$current","current","postMessage","JSON","stringify","handleLoadEnd","setTimeout","_extends","onMessage","event","data","parse","nativeEvent","_","onStatusChange","onConnect","onDisconnect","onSuggestions","onSpeakingStart","onSpeakingEnd","onTimeoutWarning","onTimeout","onKeepSession","onMuteStatusChange","onMicrophoneStatusChange","onMicrophoneSpeechRecognitionResult","onMicrophoneError","onError","webViewProps","source","html","baseUrl","onLoadEnd","javaScriptEnabled","mediaPlaybackRequiresUserAction","allowsInlineMediaPlayback","allowsFileAccess","domStorageEnabled","mediaCapturePermissionGrantType","startSession","endSession","sendMessage","text","toggleMicrophone","toggleMute","keepSession","useBridge","UnithConversationView","_ref","style","webviewProps","convo","_objectWithoutPropertiesLoose","React","WebView","ref"],"mappings":"wUACA,MAAAA,EAAA,CAAA,UAAA,QAAA,eAAA,gBA0TgB,SAAAC,EACdC,EACAC,GAEA,OAxHF,SACED,EACAC,EAA6B,CAAA,GAE7B,MAAMC,EAAaC,EAAY,MACzBC,EAAiBD,GAAO,IACvBE,EAAaC,GAAkBC,GAAS,GAIzCC,EAAOC,EAAY,CAACC,EAAcC,KAAiBC,IAAAA,EAErC,OAAlBA,EAAAV,EAAWW,UAAXD,EAAoBE,YAAYC,KAAKC,UADN,CAAEN,OAAMC,cAEtC,IAEGM,EAAgBR,EAAY,KAC5BL,EAAeS,UAKnBT,EAAeS,SAAU,EACzBP,GAAe,GAGfY,WAAW,KACTV,EAAK,OAAMW,KAAOnB,KACjB,OACF,CAACA,EAASQ,IAEPY,EAAYX,EACfY,IACC,IAAIC,EAA6B,KACjC,IACEA,EAAOP,KAAKQ,MAAMF,EAAMG,YAAYF,KACtC,CAAE,MAAOG,GACP,MACF,CACA,GAAKH,EAEL,OAAQA,EAAKZ,MACX,IAAK,gBACHT,MAAAA,EAAOyB,gBAAPzB,EAAOyB,eAAiBJ,EAAKX,SAC7B,MACF,IAAK,gBACHV,EAAO0B,WAAP1B,EAAO0B,UAAYL,EAAKX,SACxB,MACF,IAAK,aACgB,MAAnBV,EAAO2B,cAAP3B,EAAO2B,aAAeN,EAAKX,SAC3B,MACF,IAAK,UACa,MAAhBV,EAAOmB,WAAPnB,EAAOmB,UAAYE,EAAKX,SACxB,MACF,IAAK,cACHV,MAAAA,EAAO4B,eAAP5B,EAAO4B,cAAgBP,EAAKX,SAC5B,MACF,IAAK,iBACHV,MAAAA,EAAO6B,iBAAP7B,EAAO6B,kBACP,MACF,IAAK,eACiB,MAApB7B,EAAO8B,eAAP9B,EAAO8B,gBACP,MACF,IAAK,wBACH9B,EAAO+B,kBAAP/B,EAAO+B,mBACP,MACF,IAAK,UACa,MAAhB/B,EAAOgC,WAAPhC,EAAOgC,YACP,MACF,IAAK,eACHhC,MAAAA,EAAOiC,eAAPjC,EAAOiC,cAAgBZ,EAAKX,SAC5B,MACF,IAAK,oBACHV,EAAOkC,oBAAPlC,EAAOkC,mBAAqBb,EAAKX,SACjC,MACF,IAAK,aAC4B,MAA/BV,EAAOmC,0BAAPnC,EAAOmC,yBAA2Bd,EAAKX,SACvC,MACF,IAAK,iBACuC,MAA1CV,EAAOoC,qCAAPpC,EAAOoC,oCAAsCf,EAAKX,SAClD,MACF,IAAK,YACHV,MAAAA,EAAOqC,mBAAPrC,EAAOqC,kBAAoBhB,EAAKX,SAChC,MACF,IAAK,QACHV,MAAAA,EAAOsC,SAAPtC,EAAOsC,QAAUjB,EAAKX,WAM5B,CAACV,IAGH,MAAO,CACLC,aACAsC,aAAc,CACZC,OAAQ,CAAEC,KA7OP,+tJA6OaC,QAAS,qBACzBvB,YACAwB,UAAW3B,EACX4B,mBAAmB,EACnBC,iCAAiC,EACjCC,2BAA2B,EAC3BC,kBAAkB,EAClBC,mBAAmB,EACnBC,gCAAiC,SAEnC7C,cACA8C,aAAcA,IAAM3C,EAAK,iBACzB4C,WAAYA,IAAM5C,EAAK,eACvB6C,YAAcC,GAAiB9C,EAAK,eAAgB,CAAE8C,SACtDC,iBAAkBA,IAAM/C,EAAK,cAC7BgD,WAAYA,IAAMhD,EAAK,eACvBiD,YAAaA,IAAMjD,EAAK,gBAE5B,CAMSkD,CAAU1D,EAASC,EAC5B,CAegB,SAAA0D,EAAqBC,GAMR,IANS5D,QACpCA,EAAO6D,MACPA,EAAKC,aACLA,GAG2BF,EAC3B,MAAMG,EAAQhE,EAAgBC,6IAFrBgE,CAAAJ,EAAA9D,iBAIT,OACEmE,gBAACC,EAAO/C,GACNgD,IAAKJ,EAAM7D,YACP6D,EAAMvB,aACNsB,EAAY,CAChBD,MAAOA,IAGb"}
|
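The sourcesContent embedded in the new map above reflects the 0.0.12 hook signature: useConversation(options, events?) with no third BridgeOptions argument, and the web client URL pinned to DEFAULT_WEB_CLIENT_URL. A minimal usage sketch against that signature (the orgId/headId/apiKey values are placeholders, not real credentials):

import React from "react";
import { WebView } from "react-native-webview";
import { useConversation } from "@unith-ai/react-native";

export function DigitalHumanScreen() {
  const convo = useConversation(
    { orgId: "<org>", headId: "<head>", apiKey: "<key>", microphoneProvider: "custom" },
    { onStatusChange: ({ status }) => console.log("status:", status) }
  );
  // The bridge only works once webViewRef/webViewProps are attached to a WebView,
  // which is exactly what UnithConversationView does internally.
  return <WebView ref={convo.webViewRef} {...convo.webViewProps} />;
}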
package/dist/lib.module.js
CHANGED
|
@@ -1,2 +1,2 @@
|
|
|
1
|
-
import n,{useRef as e,useState as a,
|
|
1
|
+
import n,{useRef as e,useState as a,useCallback as t}from"react";import{WebView as o}from"react-native-webview";function r(){return r=Object.assign?Object.assign.bind():function(n){for(var e=1;e<arguments.length;e++){var a=arguments[e];for(var t in a)({}).hasOwnProperty.call(a,t)&&(n[t]=a[t])}return n},r.apply(null,arguments)}var s=["options","style","webviewProps","webClientUrl"];function i(n,o){return function(n,o){void 0===o&&(o={});var s=e(null),i=e(!1),c=a(!1),d=c[0],l=c[1],u=t(function(n,e){var a;null==(a=s.current)||a.postMessage(JSON.stringify({type:n,payload:e}))},[]),S=t(function(){i.current||(i.current=!0,l(!0),setTimeout(function(){u("INIT",r({},n))},100))},[n,u]),p=t(function(n){var e=null;try{e=JSON.parse(n.nativeEvent.data)}catch(n){return}if(e)switch(e.type){case"STATUS_CHANGE":null==o.onStatusChange||o.onStatusChange(e.payload);break;case"CONNECT":null==o.onConnect||o.onConnect(e.payload);break;case"DISCONNECT":null==o.onDisconnect||o.onDisconnect(e.payload);break;case"MESSAGE":null==o.onMessage||o.onMessage(e.payload);break;case"SUGGESTIONS":null==o.onSuggestions||o.onSuggestions(e.payload);break;case"SPEAKING_START":null==o.onSpeakingStart||o.onSpeakingStart();break;case"SPEAKING_END":null==o.onSpeakingEnd||o.onSpeakingEnd();break;case"TIMEOUT_WARNING":null==o.onTimeoutWarning||o.onTimeoutWarning();break;case"TIMEOUT":null==o.onTimeout||o.onTimeout();break;case"KEEP_SESSION":null==o.onKeepSession||o.onKeepSession(e.payload);break;case"MUTE_STATUS":null==o.onMuteStatusChange||o.onMuteStatusChange(e.payload);break;case"MIC_STATUS":null==o.onMicrophoneStatusChange||o.onMicrophoneStatusChange(e.payload);break;case"MIC_TRANSCRIPT":null==o.onMicrophoneSpeechRecognitionResult||o.onMicrophoneSpeechRecognitionResult(e.payload);break;case"MIC_ERROR":null==o.onMicrophoneError||o.onMicrophoneError(e.payload);break;case"ERROR":null==o.onError||o.onError(e.payload)}},[o]);return{webViewRef:s,webViewProps:{source:{html:'<!doctype html>\n<html>\n <head>\n <meta charset="utf-8" />\n <meta name="viewport" content="width=device-width, initial-scale=1" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id="root"></div>\n <script type="module">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => { \n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === "INIT") {\n try {\n const { Conversation } = await import("https://unpkg.com/@unith-ai/core-client@2.0.3-beta.2/dist/lib.web.js");\n\n if (!("VideoDecoder" in window)) {\n send("ERROR", {\n message: "WebCodecs VideoDecoder is not supported on this device.",\n type: "modal",\n endConversation: true,\n });\n return;\n }\n\n const element = document.getElementById("root");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send("STATUS_CHANGE", data),\n onConnect: (data) => send("CONNECT", data),\n onDisconnect: (data) => send("DISCONNECT", data),\n onMessage: (data) => send("MESSAGE", data),\n onSuggestions: (data) => send("SUGGESTIONS", data),\n onSpeakingStart: () => send("SPEAKING_START"),\n onSpeakingEnd: () => send("SPEAKING_END"),\n onTimeoutWarning: () => send("TIMEOUT_WARNING"),\n onTimeout: 
() => send("TIMEOUT"),\n onKeepSession: (data) => send("KEEP_SESSION", data),\n onMuteStatusChange: (data) => send("MUTE_STATUS", data),\n onError: (data) => send("ERROR", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send("MIC_ERROR", data),\n onMicrophoneStatusChange: (data) => send("MIC_STATUS", data),\n onMicrophoneSpeechRecognitionResult: (data) => send("MIC_TRANSCRIPT", data),\n },\n });\n\n ready = true;\n send("READY");\n\n while (queue.length > 0) {\n const next = queue.shift();\n try {\n await applyMessage(next);\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Failed to process queued message",\n type: "notification",\n endConversation: false,\n });\n }\n }\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Failed to initialize conversation",\n type: "modal",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n try {\n switch (type) {\n case "START_SESSION":\n await conversation.startSession();\n break;\n case "END_SESSION":\n await conversation.endSession();\n break;\n case "SEND_MESSAGE":\n await conversation.sendMessage(payload?.text || "");\n break;\n case "TOGGLE_MIC":\n await conversation.toggleMicrophone();\n break;\n case "TOGGLE_MUTE":\n await conversation.toggleMute();\n break;\n case "KEEP_SESSION":\n await conversation.keepSession();\n break;\n default:\n break;\n }\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Operation failed",\n type: "notification",\n endConversation: false,\n });\n }\n };\n\n const handleIncoming = (event) => { \n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== "INIT") {\n queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (error) {\n console.error("Failed to handle incoming message:", error);\n }\n };\n\n window.addEventListener("message", handleIncoming);\n document.addEventListener("message", handleIncoming);\n <\/script>\n </body>\n</html>',baseUrl:"https://unpkg.com"},onMessage:p,onLoadEnd:S,javaScriptEnabled:!0,mediaPlaybackRequiresUserAction:!1,allowsInlineMediaPlayback:!0,allowsFileAccess:!0,domStorageEnabled:!0,mediaCapturePermissionGrantType:"grant"},initialized:d,startSession:function(){return u("START_SESSION")},endSession:function(){return u("END_SESSION")},sendMessage:function(n){return u("SEND_MESSAGE",{text:n})},toggleMicrophone:function(){return u("TOGGLE_MIC")},toggleMute:function(){return u("TOGGLE_MUTE")},keepSession:function(){return u("KEEP_SESSION")}}}(n,o)}function c(e){var a=e.style,t=e.webviewProps,c=i(e.options,function(n,e){if(null==n)return{};var a={};for(var t in n)if({}.hasOwnProperty.call(n,t)){if(-1!==e.indexOf(t))continue;a[t]=n[t]}return a}(e,s));/*#__PURE__*/return n.createElement(o,r({ref:c.webViewRef},c.webViewProps,t,{style:a}))}export{c as UnithConversationView,i as useConversation};
|
|
2
2
|
//# sourceMappingURL=lib.module.js.map
|
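Per the page script embedded in the bundle above, every command crosses the React Native <-> WebView bridge as a JSON string of shape { type, payload }, and commands arriving before the page reports READY are queued. A sketch of the encoding (message names verbatim from the source):

// Mirrors BridgeMessage as declared in the embedded source.
type BridgeMessage = { type: string; payload?: any };

const encode = (type: string, payload?: any): string =>
  JSON.stringify({ type, payload } as BridgeMessage);

encode("SEND_MESSAGE", { text: "hello" }); // '{"type":"SEND_MESSAGE","payload":{"text":"hello"}}'
encode("START_SESSION");                   // '{"type":"START_SESSION"}' — undefined payload is dropped by JSON.stringify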
package/dist/lib.module.js.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"lib.module.js","sources":["../src/index.tsx"],"sourcesContent":["import React, { useCallback, useMemo, useRef, useState } from \"react\";\nimport { ViewStyle } from \"react-native\";\nimport { WebView, WebViewMessageEvent } from \"react-native-webview\";\n\nexport type ConversationOptions = {\n orgId: string;\n headId: string;\n apiKey: string;\n mode?: string;\n language?: string;\n username?: string;\n allowWakeLock?: boolean;\n fadeTransitionsType?: string;\n microphoneProvider?: \"azure\" | \"eleven_labs\"\n};\n\nexport type ConversationEvents = {\n onStatusChange?: (prop: { status: string }) => void;\n onConnect?: (prop: { userId: string; headInfo: any; microphoneAccess: boolean }) => void;\n onDisconnect?: (prop: any) => void;\n onMessage?: (prop: any) => void;\n onSuggestions?: (prop: { suggestions: string[] }) => void;\n onTimeoutWarning?: () => void;\n onTimeout?: () => void;\n onMuteStatusChange?: (prop: { isMuted: boolean }) => void;\n onSpeakingStart?: () => void;\n onSpeakingEnd?: () => void;\n onKeepSession?: (prop: { granted: boolean }) => void;\n onError?: (prop: { message: string; endConversation: boolean; type: string }) => void;\n onMicrophoneError?: (prop: { message: string }) => void;\n onMicrophoneStatusChange?: (prop: { status: \"ON\" | \"OFF\" | \"PROCESSING\" }) => void;\n onMicrophoneSpeechRecognitionResult?: (prop: { transcript: string }) => void;\n};\n\nexport type BridgeOptions = {\n webClientUrl?: string;\n webViewBaseUrl?: string;\n};\n\nexport type UseConversationResult = {\n webViewRef: React.RefObject<any>;\n webViewProps: Record<string, any>;\n initialized: boolean;\n startSession: () => void;\n endSession: () => void;\n sendMessage: (text: string) => void;\n toggleMicrophone: () => void;\n toggleMute: () => void;\n keepSession: () => void;\n};\n\ntype BridgeMessage = {\n type: string;\n payload?: any;\n};\n\nconst DEFAULT_WEB_CLIENT_URL =\n \"https://unpkg.com/@unith-ai/core-client@2.0.3-beta.2/dist/lib.web.js\";\n\nfunction buildHtml(webClientUrl: string) {\n return `<!doctype html>\n<html>\n <head>\n <meta charset=\"utf-8\" />\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id=\"root\"></div>\n <script type=\"module\">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n \n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => { \n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === \"INIT\") {\n try {\n const { Conversation } = await import(\"${webClientUrl}\");\n\n if (!(\"VideoDecoder\" in window)) {\n send(\"ERROR\", {\n message: \"WebCodecs VideoDecoder is not supported on this device.\",\n type: \"modal\",\n endConversation: true,\n });\n return;\n }\n\n const element = document.getElementById(\"root\");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send(\"STATUS_CHANGE\", data),\n onConnect: (data) => send(\"CONNECT\", data),\n onDisconnect: (data) => send(\"DISCONNECT\", data),\n onMessage: (data) => send(\"MESSAGE\", data),\n onSuggestions: (data) => send(\"SUGGESTIONS\", data),\n onSpeakingStart: () => send(\"SPEAKING_START\"),\n onSpeakingEnd: () 
=> send(\"SPEAKING_END\"),\n onTimeoutWarning: () => send(\"TIMEOUT_WARNING\"),\n onTimeout: () => send(\"TIMEOUT\"),\n onKeepSession: (data) => send(\"KEEP_SESSION\", data),\n onMuteStatusChange: (data) => send(\"MUTE_STATUS\", data),\n onError: (data) => send(\"ERROR\", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send(\"MIC_ERROR\", data),\n onMicrophoneStatusChange: (data) => send(\"MIC_STATUS\", data),\n onMicrophoneSpeechRecognitionResult: (data) => send(\"MIC_TRANSCRIPT\", data),\n },\n });\n\n ready = true;\n send(\"READY\");\n\n while (queue.length > 0) {\n const next = queue.shift();\n try {\n await applyMessage(next);\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to process queued message\",\n type: \"notification\",\n endConversation: false,\n });\n }\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to initialize conversation\",\n type: \"modal\",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n try {\n switch (type) {\n case \"START_SESSION\":\n await conversation.startSession();\n break;\n case \"END_SESSION\":\n await conversation.endSession();\n break;\n case \"SEND_MESSAGE\":\n await conversation.sendMessage(payload?.text || \"\");\n break;\n case \"TOGGLE_MIC\":\n await conversation.toggleMicrophone();\n break;\n case \"TOGGLE_MUTE\":\n await conversation.toggleMute();\n break;\n case \"KEEP_SESSION\":\n await conversation.keepSession();\n break;\n default:\n break;\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Operation failed\",\n type: \"notification\",\n endConversation: false,\n });\n }\n };\n\n const handleIncoming = (event) => { \n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== \"INIT\") {\n queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (error) {\n console.error(\"Failed to handle incoming message:\", error);\n }\n };\n\n window.addEventListener(\"message\", handleIncoming);\n document.addEventListener(\"message\", handleIncoming);\n </script>\n </body>\n</html>`;\n}\n\nfunction useBridge(\n options: ConversationOptions,\n events: ConversationEvents = {},\n bridge: BridgeOptions = {}\n): UseConversationResult {\n const webViewRef = useRef<any>(null);\n const initializedRef = useRef(false);\n const [initialized, setInitialized] = useState(false);\n\n const html = useMemo(\n () => buildHtml(bridge.webClientUrl || DEFAULT_WEB_CLIENT_URL),\n [bridge.webClientUrl]\n );\n\n const post = useCallback((type: string, payload?: any) => {\n const message: BridgeMessage = { type, payload };\n webViewRef.current?.postMessage(JSON.stringify(message));\n }, []);\n\n const handleLoadEnd = useCallback(() => {\n if (initializedRef.current) {\n return;\n }\n\n initializedRef.current = true;\n setInitialized(true);\n\n // Add a small delay to ensure WebView is ready to receive messages\n setTimeout(() => {\n post(\"INIT\", { ...options });\n }, 100);\n }, [options, post]);\n\n const onMessage = useCallback(\n (event: WebViewMessageEvent) => {\n let data: BridgeMessage | null = null;\n try {\n data = JSON.parse(event.nativeEvent.data);\n } catch (_) {\n return;\n }\n if (!data) return;\n\n switch (data.type) {\n case \"STATUS_CHANGE\":\n events.onStatusChange?.(data.payload);\n break;\n case \"CONNECT\":\n events.onConnect?.(data.payload);\n break;\n case \"DISCONNECT\":\n events.onDisconnect?.(data.payload);\n break;\n case \"MESSAGE\":\n events.onMessage?.(data.payload);\n break;\n case 
\"SUGGESTIONS\":\n events.onSuggestions?.(data.payload);\n break;\n case \"SPEAKING_START\":\n events.onSpeakingStart?.();\n break;\n case \"SPEAKING_END\":\n events.onSpeakingEnd?.();\n break;\n case \"TIMEOUT_WARNING\":\n events.onTimeoutWarning?.();\n break;\n case \"TIMEOUT\":\n events.onTimeout?.();\n break;\n case \"KEEP_SESSION\":\n events.onKeepSession?.(data.payload);\n break;\n case \"MUTE_STATUS\":\n events.onMuteStatusChange?.(data.payload);\n break;\n case \"MIC_STATUS\":\n events.onMicrophoneStatusChange?.(data.payload);\n break;\n case \"MIC_TRANSCRIPT\":\n events.onMicrophoneSpeechRecognitionResult?.(data.payload);\n break;\n case \"MIC_ERROR\":\n events.onMicrophoneError?.(data.payload);\n break;\n // case \"WEB_LOG\":\n // if (typeof console !== \"undefined\") {\n // const level = data.payload?.level || \"log\";\n // const args = Array.isArray(data.payload?.args)\n // ? data.payload.args\n // : [data.payload?.args];\n // const fn = (console as any)[level] || console.log;\n // fn(...args);\n // }\n // break;\n case \"ERROR\":\n events.onError?.(data.payload);\n break;\n default:\n break;\n }\n },\n [events]\n );\n\n return {\n webViewRef,\n webViewProps: {\n source: { html, baseUrl: bridge.webViewBaseUrl || \"https://unpkg.com\" },\n onMessage,\n onLoadEnd: handleLoadEnd,\n javaScriptEnabled: true,\n mediaPlaybackRequiresUserAction: false,\n allowsInlineMediaPlayback: true,\n // allowsFileAccess: true,\n // domStorageEnabled: true,\n mediaCapturePermissionGrantType: 'grant',\n },\n initialized,\n startSession: () => post(\"START_SESSION\"),\n endSession: () => post(\"END_SESSION\"),\n sendMessage: (text: string) => post(\"SEND_MESSAGE\", { text }),\n toggleMicrophone: () => post(\"TOGGLE_MIC\"),\n toggleMute: () => post(\"TOGGLE_MUTE\"),\n keepSession: () => post(\"KEEP_SESSION\"),\n };\n}\n\nexport function useConversation(\n options: ConversationOptions,\n events?: ConversationEvents,\n bridge?: BridgeOptions\n): UseConversationResult {\n return useBridge(options, events, bridge);\n}\n\n\nexport type UnithConversationViewProps = ConversationEvents & {\n options: ConversationOptions;\n style?: ViewStyle;\n webviewProps?: Record<string, any>;\n webClientUrl?: string;\n};\n\nexport function UnithConversationView({\n options,\n style,\n webviewProps,\n webClientUrl,\n ...events\n}: UnithConversationViewProps) {\n const convo = useConversation(options, events, { webClientUrl });\n\n return (\n <WebView\n ref={convo.webViewRef}\n {...convo.webViewProps}\n {...webviewProps}\n style={style}\n />\n 
);\n}\n"],"names":["useConversation","options","events","bridge","webViewRef","useRef","initializedRef","_useState","useState","initialized","setInitialized","html","useMemo","webClientUrl","post","useCallback","type","payload","_webViewRef$current","current","postMessage","JSON","stringify","handleLoadEnd","setTimeout","_extends","onMessage","event","data","parse","nativeEvent","_","onStatusChange","onConnect","onDisconnect","onSuggestions","onSpeakingStart","onSpeakingEnd","onTimeoutWarning","onTimeout","onKeepSession","onMuteStatusChange","onMicrophoneStatusChange","onMicrophoneSpeechRecognitionResult","onMicrophoneError","onError","webViewProps","source","baseUrl","webViewBaseUrl","onLoadEnd","javaScriptEnabled","mediaPlaybackRequiresUserAction","allowsInlineMediaPlayback","mediaCapturePermissionGrantType","startSession","endSession","sendMessage","text","toggleMicrophone","toggleMute","keepSession","useBridge","UnithConversationView","_ref","style","webviewProps","convo","_objectWithoutPropertiesLoose","_excluded","React","WebView","ref"],"mappings":"sZA2UgBA,EACdC,EACAC,EACAC,GAEA,OAtIF,SACEF,EACAC,EACAC,QADAD,IAAAA,IAAAA,EAA6B,CAAE,YAC/BC,IAAAA,EAAwB,IAExB,IAAMC,EAAaC,EAAY,MACzBC,EAAiBD,GAAO,GAC9BE,EAAsCC,GAAS,GAAxCC,EAAWF,EAAEG,GAAAA,EAAcH,EAAA,GAE5BI,EAAOC,EACX,y6BAAgBT,EAAOU,cA3JzB,wEAgC+D,ywHA2HC,EAC9D,CAACV,EAAOU,eAGJC,EAAOC,EAAY,SAACC,EAAcC,GAAiB,IAAAC,EAEvDA,OAAAA,EAAAd,EAAWe,UAAXD,EAAoBE,YAAYC,KAAKC,UADN,CAAEN,KAAAA,EAAMC,QAAAA,IAEzC,EAAG,IAEGM,EAAgBR,EAAY,WAC5BT,EAAea,UAInBb,EAAea,SAAU,EACzBT,GAAe,GAGfc,WAAW,WACTV,EAAK,OAAMW,EAAA,GAAOxB,GACpB,EAAG,KACL,EAAG,CAACA,EAASa,IAEPY,EAAYX,EAChB,SAACY,GACC,IAAIC,EAA6B,KACjC,IACEA,EAAOP,KAAKQ,MAAMF,EAAMG,YAAYF,KACtC,CAAE,MAAOG,GACP,MACF,CACA,GAAKH,EAEL,OAAQA,EAAKZ,MACX,IAAK,sBACHd,EAAO8B,gBAAP9B,EAAO8B,eAAiBJ,EAAKX,SAC7B,MACF,IAAK,UACa,MAAhBf,EAAO+B,WAAP/B,EAAO+B,UAAYL,EAAKX,SACxB,MACF,IAAK,aACHf,MAAAA,EAAOgC,cAAPhC,EAAOgC,aAAeN,EAAKX,SAC3B,MACF,IAAK,gBACHf,EAAOwB,WAAPxB,EAAOwB,UAAYE,EAAKX,SACxB,MACF,IAAK,cACiB,MAApBf,EAAOiC,eAAPjC,EAAOiC,cAAgBP,EAAKX,SAC5B,MACF,IAAK,iBACHf,MAAAA,EAAOkC,iBAAPlC,EAAOkC,kBACP,MACF,IAAK,qBACHlC,EAAOmC,eAAPnC,EAAOmC,gBACP,MACF,IAAK,kBACHnC,MAAAA,EAAOoC,kBAAPpC,EAAOoC,mBACP,MACF,IAAK,gBACHpC,EAAOqC,WAAPrC,EAAOqC,YACP,MACF,IAAK,eACiB,MAApBrC,EAAOsC,eAAPtC,EAAOsC,cAAgBZ,EAAKX,SAC5B,MACF,IAAK,cACHf,MAAAA,EAAOuC,oBAAPvC,EAAOuC,mBAAqBb,EAAKX,SACjC,MACF,IAAK,mBACHf,EAAOwC,0BAAPxC,EAAOwC,yBAA2Bd,EAAKX,SACvC,MACF,IAAK,iBACuC,MAA1Cf,EAAOyC,qCAAPzC,EAAOyC,oCAAsCf,EAAKX,SAClD,MACF,IAAK,YACHf,MAAAA,EAAO0C,mBAAP1C,EAAO0C,kBAAoBhB,EAAKX,SAChC,MAWF,IAAK,cACHf,EAAO2C,SAAP3C,EAAO2C,QAAUjB,EAAKX,SAK5B,EACA,CAACf,IAGH,MAAO,CACLE,WAAAA,EACA0C,aAAc,CACZC,OAAQ,CAAEpC,KAAAA,EAAMqC,QAAS7C,EAAO8C,gBAAkB,qBAClDvB,UAAAA,EACAwB,UAAW3B,EACX4B,mBAAmB,EACnBC,iCAAiC,EACjCC,2BAA2B,EAG3BC,gCAAiC,SAEnC7C,YAAAA,EACA8C,aAAc,WAAF,OAAQzC,EAAK,gBAAgB,EACzC0C,WAAY,kBAAM1C,EAAK,cAAc,EACrC2C,YAAa,SAACC,GAAY,OAAK5C,EAAK,eAAgB,CAAE4C,KAAAA,GAAO,EAC7DC,iBAAkB,kBAAM7C,EAAK,aAAa,EAC1C8C,WAAY,WAAM,OAAA9C,EAAK,cAAc,EACrC+C,YAAa,WAAF,OAAQ/C,EAAK,eAAe,EAE3C,CAOSgD,CAAU7D,EAASC,EAAQC,EACpC,CAUgB,SAAA4D,EAAqBC,GACnC,IACAC,EAAKD,EAALC,MACAC,EAAYF,EAAZE,aACArD,EAAYmD,EAAZnD,aAGMsD,EAAQnE,EANPgE,EAAP/D,mJAISmE,CAAAJ,EAAAK,GAEsC,CAAExD,aAAAA,iBAEjD,OACEyD,gBAACC,EAAO9C,GACN+C,IAAKL,EAAM/D,YACP+D,EAAMrB,aACNoB,GACJD,MAAOA,IAGb"}
|
|
1
|
+
{"version":3,"file":"lib.module.js","sources":["../src/index.tsx"],"sourcesContent":["import { Status as StatusType, Mode as ModeType, MicrophoneStatus as MicrophoneStatusType } from \"@unith-ai/core-client\";\nimport React, { useCallback, useRef, useState } from \"react\";\nimport { ViewStyle } from \"react-native\";\nimport { WebView, WebViewMessageEvent } from \"react-native-webview\";\n\nexport type ConversationOptions = {\n orgId: string;\n headId: string;\n apiKey: string;\n language?: string;\n username?: string;\n microphoneProvider?: \"azure\" | \"eleven_labs\" | \"custom\";\n};\n\nexport type ConversationEvents = {\n onStatusChange?: (prop: { status: string }) => void;\n onConnect?: (prop: { userId: string; headInfo: any; microphoneAccess: boolean }) => void;\n onDisconnect?: (prop: any) => void;\n onMessage?: (prop: any) => void;\n onSuggestions?: (prop: { suggestions: string[] }) => void;\n onTimeoutWarning?: () => void;\n onTimeout?: () => void;\n onMuteStatusChange?: (prop: { isMuted: boolean }) => void;\n onSpeakingStart?: () => void;\n onSpeakingEnd?: () => void;\n onKeepSession?: (prop: { granted: boolean }) => void;\n onError?: (prop: { message: string; endConversation: boolean; type: string }) => void;\n onMicrophoneError?: (prop: { message: string }) => void;\n onMicrophoneStatusChange?: (prop: { status: \"ON\" | \"OFF\" | \"PROCESSING\" }) => void;\n onMicrophoneSpeechRecognitionResult?: (prop: { transcript: string }) => void;\n};\n\nexport type BridgeOptions = {\n webClientUrl?: string;\n webViewBaseUrl?: string;\n};\n\nexport type UseConversationResult = {\n webViewRef: React.RefObject<any>;\n webViewProps: Record<string, any>;\n initialized: boolean;\n startSession: () => void;\n endSession: () => void;\n sendMessage: (text: string) => void;\n toggleMicrophone: () => void;\n toggleMute: () => void;\n keepSession: () => void;\n};\n\ntype BridgeMessage = {\n type: string;\n payload?: any;\n};\n\nconst DEFAULT_WEB_CLIENT_URL =\n \"https://unpkg.com/@unith-ai/core-client@2.0.3-beta.2/dist/lib.web.js\";\n\nfunction buildHtml(webClientUrl: string) {\n return `<!doctype html>\n<html>\n <head>\n <meta charset=\"utf-8\" />\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id=\"root\"></div>\n <script type=\"module\">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => { \n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === \"INIT\") {\n try {\n const { Conversation } = await import(\"${webClientUrl}\");\n\n if (!(\"VideoDecoder\" in window)) {\n send(\"ERROR\", {\n message: \"WebCodecs VideoDecoder is not supported on this device.\",\n type: \"modal\",\n endConversation: true,\n });\n return;\n }\n\n const element = document.getElementById(\"root\");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send(\"STATUS_CHANGE\", data),\n onConnect: (data) => send(\"CONNECT\", data),\n onDisconnect: (data) => send(\"DISCONNECT\", data),\n onMessage: (data) => send(\"MESSAGE\", data),\n onSuggestions: (data) => send(\"SUGGESTIONS\", data),\n onSpeakingStart: () 
=> send(\"SPEAKING_START\"),\n onSpeakingEnd: () => send(\"SPEAKING_END\"),\n onTimeoutWarning: () => send(\"TIMEOUT_WARNING\"),\n onTimeout: () => send(\"TIMEOUT\"),\n onKeepSession: (data) => send(\"KEEP_SESSION\", data),\n onMuteStatusChange: (data) => send(\"MUTE_STATUS\", data),\n onError: (data) => send(\"ERROR\", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send(\"MIC_ERROR\", data),\n onMicrophoneStatusChange: (data) => send(\"MIC_STATUS\", data),\n onMicrophoneSpeechRecognitionResult: (data) => send(\"MIC_TRANSCRIPT\", data),\n },\n });\n\n ready = true;\n send(\"READY\");\n\n while (queue.length > 0) {\n const next = queue.shift();\n try {\n await applyMessage(next);\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to process queued message\",\n type: \"notification\",\n endConversation: false,\n });\n }\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to initialize conversation\",\n type: \"modal\",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n try {\n switch (type) {\n case \"START_SESSION\":\n await conversation.startSession();\n break;\n case \"END_SESSION\":\n await conversation.endSession();\n break;\n case \"SEND_MESSAGE\":\n await conversation.sendMessage(payload?.text || \"\");\n break;\n case \"TOGGLE_MIC\":\n await conversation.toggleMicrophone();\n break;\n case \"TOGGLE_MUTE\":\n await conversation.toggleMute();\n break;\n case \"KEEP_SESSION\":\n await conversation.keepSession();\n break;\n default:\n break;\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Operation failed\",\n type: \"notification\",\n endConversation: false,\n });\n }\n };\n\n const handleIncoming = (event) => { \n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== \"INIT\") {\n queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (error) {\n console.error(\"Failed to handle incoming message:\", error);\n }\n };\n\n window.addEventListener(\"message\", handleIncoming);\n document.addEventListener(\"message\", handleIncoming);\n </script>\n </body>\n</html>`;\n}\n\nfunction useBridge(\n options: ConversationOptions,\n events: ConversationEvents = {},\n): UseConversationResult {\n const webViewRef = useRef<any>(null);\n const initializedRef = useRef(false);\n const [initialized, setInitialized] = useState(false);\n\n const html = buildHtml(DEFAULT_WEB_CLIENT_URL)\n\n const post = useCallback((type: string, payload?: any) => {\n const message: BridgeMessage = { type, payload };\n webViewRef.current?.postMessage(JSON.stringify(message));\n }, []);\n\n const handleLoadEnd = useCallback(() => {\n if (initializedRef.current) {\n return;\n }\n\n\n initializedRef.current = true;\n setInitialized(true);\n\n // Add a small delay to ensure WebView is ready to receive messages\n setTimeout(() => {\n post(\"INIT\", { ...options });\n }, 100);\n }, [options, post]);\n\n const onMessage = useCallback(\n (event: WebViewMessageEvent) => {\n let data: BridgeMessage | null = null;\n try {\n data = JSON.parse(event.nativeEvent.data);\n } catch (_) {\n return;\n }\n if (!data) return;\n\n switch (data.type) {\n case \"STATUS_CHANGE\":\n events.onStatusChange?.(data.payload);\n break;\n case \"CONNECT\":\n events.onConnect?.(data.payload);\n break;\n case \"DISCONNECT\":\n events.onDisconnect?.(data.payload);\n break;\n case \"MESSAGE\":\n events.onMessage?.(data.payload);\n break;\n case \"SUGGESTIONS\":\n 
events.onSuggestions?.(data.payload);\n break;\n case \"SPEAKING_START\":\n events.onSpeakingStart?.();\n break;\n case \"SPEAKING_END\":\n events.onSpeakingEnd?.();\n break;\n case \"TIMEOUT_WARNING\":\n events.onTimeoutWarning?.();\n break;\n case \"TIMEOUT\":\n events.onTimeout?.();\n break;\n case \"KEEP_SESSION\":\n events.onKeepSession?.(data.payload);\n break;\n case \"MUTE_STATUS\":\n events.onMuteStatusChange?.(data.payload);\n break;\n case \"MIC_STATUS\":\n events.onMicrophoneStatusChange?.(data.payload);\n break;\n case \"MIC_TRANSCRIPT\":\n events.onMicrophoneSpeechRecognitionResult?.(data.payload);\n break;\n case \"MIC_ERROR\":\n events.onMicrophoneError?.(data.payload);\n break;\n case \"ERROR\":\n events.onError?.(data.payload);\n break;\n default:\n break;\n }\n },\n [events]\n );\n\n return {\n webViewRef,\n webViewProps: {\n source: { html, baseUrl: \"https://unpkg.com\" },\n onMessage,\n onLoadEnd: handleLoadEnd,\n javaScriptEnabled: true,\n mediaPlaybackRequiresUserAction: false,\n allowsInlineMediaPlayback: true,\n allowsFileAccess: true,\n domStorageEnabled: true,\n mediaCapturePermissionGrantType: 'grant',\n },\n initialized,\n startSession: () => post(\"START_SESSION\"),\n endSession: () => post(\"END_SESSION\"),\n sendMessage: (text: string) => post(\"SEND_MESSAGE\", { text }),\n toggleMicrophone: () => post(\"TOGGLE_MIC\"),\n toggleMute: () => post(\"TOGGLE_MUTE\"),\n keepSession: () => post(\"KEEP_SESSION\"),\n };\n}\n\nexport function useConversation(\n options: ConversationOptions,\n events?: ConversationEvents,\n): UseConversationResult {\n return useBridge(options, events,);\n}\n\nexport type UnithConversationViewProps = ConversationEvents & {\n options: ConversationOptions;\n style?: ViewStyle;\n webviewProps?: Record<string, any>;\n webClientUrl?: string;\n};\n\n\nexport type Status = StatusType;\nexport type Mode = ModeType;\nexport type MicrophoneStatus = MicrophoneStatusType;\n\n\nexport function UnithConversationView({\n options,\n style,\n webviewProps,\n webClientUrl,\n ...events\n}: UnithConversationViewProps) {\n const convo = useConversation(options, events);\n\n return (\n <WebView\n ref={convo.webViewRef}\n {...convo.webViewProps}\n {...webviewProps}\n style={style}\n />\n 
);\n}\n"],"names":["_excluded","useConversation","options","events","webViewRef","useRef","initializedRef","_useState","useState","initialized","setInitialized","post","useCallback","type","payload","_webViewRef$current","current","postMessage","JSON","stringify","handleLoadEnd","setTimeout","_extends","onMessage","event","data","parse","nativeEvent","_","onStatusChange","onConnect","onDisconnect","onSuggestions","onSpeakingStart","onSpeakingEnd","onTimeoutWarning","onTimeout","onKeepSession","onMuteStatusChange","onMicrophoneStatusChange","onMicrophoneSpeechRecognitionResult","onMicrophoneError","onError","webViewProps","source","html","baseUrl","onLoadEnd","javaScriptEnabled","mediaPlaybackRequiresUserAction","allowsInlineMediaPlayback","allowsFileAccess","domStorageEnabled","mediaCapturePermissionGrantType","startSession","endSession","sendMessage","text","toggleMicrophone","toggleMute","keepSession","useBridge","UnithConversationView","_ref","style","webviewProps","convo","_objectWithoutPropertiesLoose","React","WebView","ref"],"mappings":"wUACA,IAAAA,EAAA,CAAA,UAAA,QAAA,eAAA,gBA0TgB,SAAAC,EACdC,EACAC,GAEA,OAxHF,SACED,EACAC,YAAAA,IAAAA,EAA6B,CAAA,GAE7B,IAAMC,EAAaC,EAAY,MACzBC,EAAiBD,GAAO,GAC9BE,EAAsCC,GAAS,GAAxCC,EAAWF,KAAEG,EAAcH,EAElC,GAEMI,EAAOC,EAAY,SAACC,EAAcC,GAAiB,IAAAC,EAErC,OAAlBA,EAAAX,EAAWY,UAAXD,EAAoBE,YAAYC,KAAKC,UADN,CAAEN,KAAAA,EAAMC,QAAAA,IAEzC,EAAG,IAEGM,EAAgBR,EAAY,WAC5BN,EAAeU,UAKnBV,EAAeU,SAAU,EACzBN,GAAe,GAGfW,WAAW,WACTV,EAAK,OAAMW,EAAA,CAAA,EAAOpB,GACpB,EAAG,KACL,EAAG,CAACA,EAASS,IAEPY,EAAYX,EAChB,SAACY,GACC,IAAIC,EAA6B,KACjC,IACEA,EAAOP,KAAKQ,MAAMF,EAAMG,YAAYF,KACtC,CAAE,MAAOG,GACP,MACF,CACA,GAAKH,EAEL,OAAQA,EAAKZ,MACX,IAAK,sBACHV,EAAO0B,gBAAP1B,EAAO0B,eAAiBJ,EAAKX,SAC7B,MACF,IAAK,gBACHX,EAAO2B,WAAP3B,EAAO2B,UAAYL,EAAKX,SACxB,MACF,IAAK,mBACHX,EAAO4B,cAAP5B,EAAO4B,aAAeN,EAAKX,SAC3B,MACF,IAAK,gBACHX,EAAOoB,WAAPpB,EAAOoB,UAAYE,EAAKX,SACxB,MACF,IAAK,cACiB,MAApBX,EAAO6B,eAAP7B,EAAO6B,cAAgBP,EAAKX,SAC5B,MACF,IAAK,iBACmB,MAAtBX,EAAO8B,iBAAP9B,EAAO8B,kBACP,MACF,IAAK,eACH9B,MAAAA,EAAO+B,eAAP/B,EAAO+B,gBACP,MACF,IAAK,wBACH/B,EAAOgC,kBAAPhC,EAAOgC,mBACP,MACF,IAAK,gBACHhC,EAAOiC,WAAPjC,EAAOiC,YACP,MACF,IAAK,eACiB,MAApBjC,EAAOkC,eAAPlC,EAAOkC,cAAgBZ,EAAKX,SAC5B,MACF,IAAK,cACsB,MAAzBX,EAAOmC,oBAAPnC,EAAOmC,mBAAqBb,EAAKX,SACjC,MACF,IAAK,aAC4B,MAA/BX,EAAOoC,0BAAPpC,EAAOoC,yBAA2Bd,EAAKX,SACvC,MACF,IAAK,iBACuC,MAA1CX,EAAOqC,qCAAPrC,EAAOqC,oCAAsCf,EAAKX,SAClD,MACF,IAAK,YACHX,MAAAA,EAAOsC,mBAAPtC,EAAOsC,kBAAoBhB,EAAKX,SAChC,MACF,IAAK,QACHX,MAAAA,EAAOuC,SAAPvC,EAAOuC,QAAUjB,EAAKX,SAK5B,EACA,CAACX,IAGH,MAAO,CACLC,WAAAA,EACAuC,aAAc,CACZC,OAAQ,CAAEC,ouJAAMC,QAAS,qBACzBvB,UAAAA,EACAwB,UAAW3B,EACX4B,mBAAmB,EACnBC,iCAAiC,EACjCC,2BAA2B,EAC3BC,kBAAkB,EAClBC,mBAAmB,EACnBC,gCAAiC,SAEnC5C,YAAAA,EACA6C,aAAc,WAAF,OAAQ3C,EAAK,gBAAgB,EACzC4C,WAAY,kBAAM5C,EAAK,cAAc,EACrC6C,YAAa,SAACC,UAAiB9C,EAAK,eAAgB,CAAE8C,KAAAA,GAAO,EAC7DC,iBAAkB,WAAF,OAAQ/C,EAAK,aAAa,EAC1CgD,WAAY,kBAAMhD,EAAK,cAAc,EACrCiD,YAAa,WAAM,OAAAjD,EAAK,eAAe,EAE3C,CAMSkD,CAAU3D,EAASC,EAC5B,CAegB,SAAA2D,EAAqBC,GACnC,IACAC,EAAKD,EAALC,MACAC,EAAYF,EAAZE,aAIMC,EAAQjE,EANP8D,EAAP7D,mJAISiE,CAAAJ,EAAA/D,iBAIT,OACEoE,gBAACC,EAAO/C,EAAA,CACNgD,IAAKJ,EAAM9D,YACP8D,EAAMvB,aACNsB,EAAY,CAChBD,MAAOA,IAGb"}
|
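The onMessage switch in the map above routes page events to the optional ConversationEvents callbacks ("STATUS_CHANGE" -> onStatusChange, "MIC_TRANSCRIPT" -> onMicrophoneSpeechRecognitionResult, "ERROR" -> onError, and so on). A condensed sketch of that dispatch, for understanding only — the hook already performs it internally:

import type { ConversationEvents } from "@unith-ai/react-native";

function dispatch(raw: string, events: ConversationEvents): void {
  let msg: { type: string; payload?: any };
  try { msg = JSON.parse(raw); } catch { return; } // malformed frames are ignored, as in the hook
  switch (msg.type) {
    case "STATUS_CHANGE": events.onStatusChange?.(msg.payload); break;
    case "MIC_TRANSCRIPT": events.onMicrophoneSpeechRecognitionResult?.(msg.payload); break;
    case "ERROR": events.onError?.(msg.payload); break;
  }
}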
package/dist/lib.umd.js
CHANGED
|
@@ -1,2 +1,2 @@
|
|
|
1
|
-
!function(n,e){"object"==typeof exports&&"undefined"!=typeof module?e(exports,require("react"),require("react-native-webview")):"function"==typeof define&&define.amd?define(["exports","react","react-native-webview"],e):e((n||self).reactNative={},n.react,n.reactNativeWebview)}(this,function(n,e,
|
|
1
|
+
!function(n,e){"object"==typeof exports&&"undefined"!=typeof module?e(exports,require("react"),require("react-native-webview")):"function"==typeof define&&define.amd?define(["exports","react","react-native-webview"],e):e((n||self).reactNative={},n.react,n.reactNativeWebview)}(this,function(n,e,a){function t(n){return n&&"object"==typeof n&&"default"in n?n:{default:n}}var o=/*#__PURE__*/t(e);function s(){return s=Object.assign?Object.assign.bind():function(n){for(var e=1;e<arguments.length;e++){var a=arguments[e];for(var t in a)({}).hasOwnProperty.call(a,t)&&(n[t]=a[t])}return n},s.apply(null,arguments)}var r=["options","style","webviewProps","webClientUrl"];function i(n,a){return function(n,a){void 0===a&&(a={});var t=e.useRef(null),o=e.useRef(!1),r=e.useState(!1),i=r[0],c=r[1],d=e.useCallback(function(n,e){var a;null==(a=t.current)||a.postMessage(JSON.stringify({type:n,payload:e}))},[]),l=e.useCallback(function(){o.current||(o.current=!0,c(!0),setTimeout(function(){d("INIT",s({},n))},100))},[n,d]),u=e.useCallback(function(n){var e=null;try{e=JSON.parse(n.nativeEvent.data)}catch(n){return}if(e)switch(e.type){case"STATUS_CHANGE":null==a.onStatusChange||a.onStatusChange(e.payload);break;case"CONNECT":null==a.onConnect||a.onConnect(e.payload);break;case"DISCONNECT":null==a.onDisconnect||a.onDisconnect(e.payload);break;case"MESSAGE":null==a.onMessage||a.onMessage(e.payload);break;case"SUGGESTIONS":null==a.onSuggestions||a.onSuggestions(e.payload);break;case"SPEAKING_START":null==a.onSpeakingStart||a.onSpeakingStart();break;case"SPEAKING_END":null==a.onSpeakingEnd||a.onSpeakingEnd();break;case"TIMEOUT_WARNING":null==a.onTimeoutWarning||a.onTimeoutWarning();break;case"TIMEOUT":null==a.onTimeout||a.onTimeout();break;case"KEEP_SESSION":null==a.onKeepSession||a.onKeepSession(e.payload);break;case"MUTE_STATUS":null==a.onMuteStatusChange||a.onMuteStatusChange(e.payload);break;case"MIC_STATUS":null==a.onMicrophoneStatusChange||a.onMicrophoneStatusChange(e.payload);break;case"MIC_TRANSCRIPT":null==a.onMicrophoneSpeechRecognitionResult||a.onMicrophoneSpeechRecognitionResult(e.payload);break;case"MIC_ERROR":null==a.onMicrophoneError||a.onMicrophoneError(e.payload);break;case"ERROR":null==a.onError||a.onError(e.payload)}},[a]);return{webViewRef:t,webViewProps:{source:{html:'<!doctype html>\n<html>\n <head>\n <meta charset="utf-8" />\n <meta name="viewport" content="width=device-width, initial-scale=1" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id="root"></div>\n <script type="module">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => { \n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === "INIT") {\n try {\n const { Conversation } = await import("https://unpkg.com/@unith-ai/core-client@2.0.3-beta.2/dist/lib.web.js");\n\n if (!("VideoDecoder" in window)) {\n send("ERROR", {\n message: "WebCodecs VideoDecoder is not supported on this device.",\n type: "modal",\n endConversation: true,\n });\n return;\n }\n\n const element = document.getElementById("root");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send("STATUS_CHANGE", data),\n onConnect: 
(data) => send("CONNECT", data),\n onDisconnect: (data) => send("DISCONNECT", data),\n onMessage: (data) => send("MESSAGE", data),\n onSuggestions: (data) => send("SUGGESTIONS", data),\n onSpeakingStart: () => send("SPEAKING_START"),\n onSpeakingEnd: () => send("SPEAKING_END"),\n onTimeoutWarning: () => send("TIMEOUT_WARNING"),\n onTimeout: () => send("TIMEOUT"),\n onKeepSession: (data) => send("KEEP_SESSION", data),\n onMuteStatusChange: (data) => send("MUTE_STATUS", data),\n onError: (data) => send("ERROR", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send("MIC_ERROR", data),\n onMicrophoneStatusChange: (data) => send("MIC_STATUS", data),\n onMicrophoneSpeechRecognitionResult: (data) => send("MIC_TRANSCRIPT", data),\n },\n });\n\n ready = true;\n send("READY");\n\n while (queue.length > 0) {\n const next = queue.shift();\n try {\n await applyMessage(next);\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Failed to process queued message",\n type: "notification",\n endConversation: false,\n });\n }\n }\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Failed to initialize conversation",\n type: "modal",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n try {\n switch (type) {\n case "START_SESSION":\n await conversation.startSession();\n break;\n case "END_SESSION":\n await conversation.endSession();\n break;\n case "SEND_MESSAGE":\n await conversation.sendMessage(payload?.text || "");\n break;\n case "TOGGLE_MIC":\n await conversation.toggleMicrophone();\n break;\n case "TOGGLE_MUTE":\n await conversation.toggleMute();\n break;\n case "KEEP_SESSION":\n await conversation.keepSession();\n break;\n default:\n break;\n }\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Operation failed",\n type: "notification",\n endConversation: false,\n });\n }\n };\n\n const handleIncoming = (event) => { \n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== "INIT") {\n queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (error) {\n console.error("Failed to handle incoming message:", error);\n }\n };\n\n window.addEventListener("message", handleIncoming);\n document.addEventListener("message", handleIncoming);\n <\/script>\n </body>\n</html>',baseUrl:"https://unpkg.com"},onMessage:u,onLoadEnd:l,javaScriptEnabled:!0,mediaPlaybackRequiresUserAction:!1,allowsInlineMediaPlayback:!0,allowsFileAccess:!0,domStorageEnabled:!0,mediaCapturePermissionGrantType:"grant"},initialized:i,startSession:function(){return d("START_SESSION")},endSession:function(){return d("END_SESSION")},sendMessage:function(n){return d("SEND_MESSAGE",{text:n})},toggleMicrophone:function(){return d("TOGGLE_MIC")},toggleMute:function(){return d("TOGGLE_MUTE")},keepSession:function(){return d("KEEP_SESSION")}}}(n,a)}n.UnithConversationView=function(n){var e=n.style,t=n.webviewProps,c=i(n.options,function(n,e){if(null==n)return{};var a={};for(var t in n)if({}.hasOwnProperty.call(n,t)){if(-1!==e.indexOf(t))continue;a[t]=n[t]}return a}(n,r));/*#__PURE__*/return o.default.createElement(a.WebView,s({ref:c.webViewRef},c.webViewProps,t,{style:e}))},n.useConversation=i});
|
|
2
2
|
//# sourceMappingURL=lib.umd.js.map
|
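The UMD build above exports the same two symbols, UnithConversationView and useConversation. A hedged sketch of the component form, wiring a few of the typed event props (handler bodies are illustrative, credentials are placeholders):

import React from "react";
import { UnithConversationView } from "@unith-ai/react-native";

export function Screen() {
  return (
    <UnithConversationView
      options={{ orgId: "<org>", headId: "<head>", apiKey: "<key>" }}
      style={{ flex: 1 }}
      onConnect={({ userId, microphoneAccess }) => console.log(userId, microphoneAccess)}
      onError={({ message, endConversation }) => {
        // endConversation: true marks fatal errors, e.g. the embedded page's
        // WebCodecs VideoDecoder check failing on this device.
        if (endConversation) console.warn(message);
      }}
    />
  );
}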
package/dist/lib.umd.js.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"lib.umd.js","sources":["../src/index.tsx"],"sourcesContent":["import React, { useCallback, useMemo, useRef, useState } from \"react\";\nimport { ViewStyle } from \"react-native\";\nimport { WebView, WebViewMessageEvent } from \"react-native-webview\";\n\nexport type ConversationOptions = {\n orgId: string;\n headId: string;\n apiKey: string;\n mode?: string;\n language?: string;\n username?: string;\n allowWakeLock?: boolean;\n fadeTransitionsType?: string;\n microphoneProvider?: \"azure\" | \"eleven_labs\"\n};\n\nexport type ConversationEvents = {\n onStatusChange?: (prop: { status: string }) => void;\n onConnect?: (prop: { userId: string; headInfo: any; microphoneAccess: boolean }) => void;\n onDisconnect?: (prop: any) => void;\n onMessage?: (prop: any) => void;\n onSuggestions?: (prop: { suggestions: string[] }) => void;\n onTimeoutWarning?: () => void;\n onTimeout?: () => void;\n onMuteStatusChange?: (prop: { isMuted: boolean }) => void;\n onSpeakingStart?: () => void;\n onSpeakingEnd?: () => void;\n onKeepSession?: (prop: { granted: boolean }) => void;\n onError?: (prop: { message: string; endConversation: boolean; type: string }) => void;\n onMicrophoneError?: (prop: { message: string }) => void;\n onMicrophoneStatusChange?: (prop: { status: \"ON\" | \"OFF\" | \"PROCESSING\" }) => void;\n onMicrophoneSpeechRecognitionResult?: (prop: { transcript: string }) => void;\n};\n\nexport type BridgeOptions = {\n webClientUrl?: string;\n webViewBaseUrl?: string;\n};\n\nexport type UseConversationResult = {\n webViewRef: React.RefObject<any>;\n webViewProps: Record<string, any>;\n initialized: boolean;\n startSession: () => void;\n endSession: () => void;\n sendMessage: (text: string) => void;\n toggleMicrophone: () => void;\n toggleMute: () => void;\n keepSession: () => void;\n};\n\ntype BridgeMessage = {\n type: string;\n payload?: any;\n};\n\nconst DEFAULT_WEB_CLIENT_URL =\n \"https://unpkg.com/@unith-ai/core-client@2.0.3-beta.2/dist/lib.web.js\";\n\nfunction buildHtml(webClientUrl: string) {\n return `<!doctype html>\n<html>\n <head>\n <meta charset=\"utf-8\" />\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id=\"root\"></div>\n <script type=\"module\">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n \n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => { \n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === \"INIT\") {\n try {\n const { Conversation } = await import(\"${webClientUrl}\");\n\n if (!(\"VideoDecoder\" in window)) {\n send(\"ERROR\", {\n message: \"WebCodecs VideoDecoder is not supported on this device.\",\n type: \"modal\",\n endConversation: true,\n });\n return;\n }\n\n const element = document.getElementById(\"root\");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send(\"STATUS_CHANGE\", data),\n onConnect: (data) => send(\"CONNECT\", data),\n onDisconnect: (data) => send(\"DISCONNECT\", data),\n onMessage: (data) => send(\"MESSAGE\", data),\n onSuggestions: (data) => send(\"SUGGESTIONS\", data),\n onSpeakingStart: () => send(\"SPEAKING_START\"),\n onSpeakingEnd: () => 
send(\"SPEAKING_END\"),\n onTimeoutWarning: () => send(\"TIMEOUT_WARNING\"),\n onTimeout: () => send(\"TIMEOUT\"),\n onKeepSession: (data) => send(\"KEEP_SESSION\", data),\n onMuteStatusChange: (data) => send(\"MUTE_STATUS\", data),\n onError: (data) => send(\"ERROR\", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send(\"MIC_ERROR\", data),\n onMicrophoneStatusChange: (data) => send(\"MIC_STATUS\", data),\n onMicrophoneSpeechRecognitionResult: (data) => send(\"MIC_TRANSCRIPT\", data),\n },\n });\n\n ready = true;\n send(\"READY\");\n\n while (queue.length > 0) {\n const next = queue.shift();\n try {\n await applyMessage(next);\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to process queued message\",\n type: \"notification\",\n endConversation: false,\n });\n }\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to initialize conversation\",\n type: \"modal\",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n try {\n switch (type) {\n case \"START_SESSION\":\n await conversation.startSession();\n break;\n case \"END_SESSION\":\n await conversation.endSession();\n break;\n case \"SEND_MESSAGE\":\n await conversation.sendMessage(payload?.text || \"\");\n break;\n case \"TOGGLE_MIC\":\n await conversation.toggleMicrophone();\n break;\n case \"TOGGLE_MUTE\":\n await conversation.toggleMute();\n break;\n case \"KEEP_SESSION\":\n await conversation.keepSession();\n break;\n default:\n break;\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Operation failed\",\n type: \"notification\",\n endConversation: false,\n });\n }\n };\n\n const handleIncoming = (event) => { \n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== \"INIT\") {\n queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (error) {\n console.error(\"Failed to handle incoming message:\", error);\n }\n };\n\n window.addEventListener(\"message\", handleIncoming);\n document.addEventListener(\"message\", handleIncoming);\n </script>\n </body>\n</html>`;\n}\n\nfunction useBridge(\n options: ConversationOptions,\n events: ConversationEvents = {},\n bridge: BridgeOptions = {}\n): UseConversationResult {\n const webViewRef = useRef<any>(null);\n const initializedRef = useRef(false);\n const [initialized, setInitialized] = useState(false);\n\n const html = useMemo(\n () => buildHtml(bridge.webClientUrl || DEFAULT_WEB_CLIENT_URL),\n [bridge.webClientUrl]\n );\n\n const post = useCallback((type: string, payload?: any) => {\n const message: BridgeMessage = { type, payload };\n webViewRef.current?.postMessage(JSON.stringify(message));\n }, []);\n\n const handleLoadEnd = useCallback(() => {\n if (initializedRef.current) {\n return;\n }\n\n initializedRef.current = true;\n setInitialized(true);\n\n // Add a small delay to ensure WebView is ready to receive messages\n setTimeout(() => {\n post(\"INIT\", { ...options });\n }, 100);\n }, [options, post]);\n\n const onMessage = useCallback(\n (event: WebViewMessageEvent) => {\n let data: BridgeMessage | null = null;\n try {\n data = JSON.parse(event.nativeEvent.data);\n } catch (_) {\n return;\n }\n if (!data) return;\n\n switch (data.type) {\n case \"STATUS_CHANGE\":\n events.onStatusChange?.(data.payload);\n break;\n case \"CONNECT\":\n events.onConnect?.(data.payload);\n break;\n case \"DISCONNECT\":\n events.onDisconnect?.(data.payload);\n break;\n case \"MESSAGE\":\n events.onMessage?.(data.payload);\n break;\n case 
\"SUGGESTIONS\":\n events.onSuggestions?.(data.payload);\n break;\n case \"SPEAKING_START\":\n events.onSpeakingStart?.();\n break;\n case \"SPEAKING_END\":\n events.onSpeakingEnd?.();\n break;\n case \"TIMEOUT_WARNING\":\n events.onTimeoutWarning?.();\n break;\n case \"TIMEOUT\":\n events.onTimeout?.();\n break;\n case \"KEEP_SESSION\":\n events.onKeepSession?.(data.payload);\n break;\n case \"MUTE_STATUS\":\n events.onMuteStatusChange?.(data.payload);\n break;\n case \"MIC_STATUS\":\n events.onMicrophoneStatusChange?.(data.payload);\n break;\n case \"MIC_TRANSCRIPT\":\n events.onMicrophoneSpeechRecognitionResult?.(data.payload);\n break;\n case \"MIC_ERROR\":\n events.onMicrophoneError?.(data.payload);\n break;\n // case \"WEB_LOG\":\n // if (typeof console !== \"undefined\") {\n // const level = data.payload?.level || \"log\";\n // const args = Array.isArray(data.payload?.args)\n // ? data.payload.args\n // : [data.payload?.args];\n // const fn = (console as any)[level] || console.log;\n // fn(...args);\n // }\n // break;\n case \"ERROR\":\n events.onError?.(data.payload);\n break;\n default:\n break;\n }\n },\n [events]\n );\n\n return {\n webViewRef,\n webViewProps: {\n source: { html, baseUrl: bridge.webViewBaseUrl || \"https://unpkg.com\" },\n onMessage,\n onLoadEnd: handleLoadEnd,\n javaScriptEnabled: true,\n mediaPlaybackRequiresUserAction: false,\n allowsInlineMediaPlayback: true,\n // allowsFileAccess: true,\n // domStorageEnabled: true,\n mediaCapturePermissionGrantType: 'grant',\n },\n initialized,\n startSession: () => post(\"START_SESSION\"),\n endSession: () => post(\"END_SESSION\"),\n sendMessage: (text: string) => post(\"SEND_MESSAGE\", { text }),\n toggleMicrophone: () => post(\"TOGGLE_MIC\"),\n toggleMute: () => post(\"TOGGLE_MUTE\"),\n keepSession: () => post(\"KEEP_SESSION\"),\n };\n}\n\nexport function useConversation(\n options: ConversationOptions,\n events?: ConversationEvents,\n bridge?: BridgeOptions\n): UseConversationResult {\n return useBridge(options, events, bridge);\n}\n\n\nexport type UnithConversationViewProps = ConversationEvents & {\n options: ConversationOptions;\n style?: ViewStyle;\n webviewProps?: Record<string, any>;\n webClientUrl?: string;\n};\n\nexport function UnithConversationView({\n options,\n style,\n webviewProps,\n webClientUrl,\n ...events\n}: UnithConversationViewProps) {\n const convo = useConversation(options, events, { webClientUrl });\n\n return (\n <WebView\n ref={convo.webViewRef}\n {...convo.webViewProps}\n {...webviewProps}\n style={style}\n />\n 
);\n}\n"],"names":["useConversation","options","events","bridge","webViewRef","useRef","initializedRef","_useState","useState","initialized","setInitialized","html","useMemo","webClientUrl","post","useCallback","type","payload","_webViewRef$current","current","postMessage","JSON","stringify","handleLoadEnd","setTimeout","_extends","onMessage","event","data","parse","nativeEvent","_","onStatusChange","onConnect","onDisconnect","onSuggestions","onSpeakingStart","onSpeakingEnd","onTimeoutWarning","onTimeout","onKeepSession","onMuteStatusChange","onMicrophoneStatusChange","onMicrophoneSpeechRecognitionResult","onMicrophoneError","onError","webViewProps","source","baseUrl","webViewBaseUrl","onLoadEnd","javaScriptEnabled","mediaPlaybackRequiresUserAction","allowsInlineMediaPlayback","mediaCapturePermissionGrantType","startSession","endSession","sendMessage","text","toggleMicrophone","toggleMute","keepSession","useBridge","_ref","style","webviewProps","convo","_objectWithoutPropertiesLoose","_excluded","React","WebView","ref"],"mappings":"+sBA2UgBA,EACdC,EACAC,EACAC,GAEA,OAtIF,SACEF,EACAC,EACAC,QADAD,IAAAA,IAAAA,EAA6B,CAAE,YAC/BC,IAAAA,EAAwB,IAExB,IAAMC,EAAaC,EAAAA,OAAY,MACzBC,EAAiBD,UAAO,GAC9BE,EAAsCC,EAAAA,UAAS,GAAxCC,EAAWF,EAAEG,GAAAA,EAAcH,EAAA,GAE5BI,EAAOC,EAAAA,QACX,y6BAAgBT,EAAOU,cA3JzB,wEAgC+D,ywHA2HC,EAC9D,CAACV,EAAOU,eAGJC,EAAOC,EAAAA,YAAY,SAACC,EAAcC,GAAiB,IAAAC,EAEvDA,OAAAA,EAAAd,EAAWe,UAAXD,EAAoBE,YAAYC,KAAKC,UADN,CAAEN,KAAAA,EAAMC,QAAAA,IAEzC,EAAG,IAEGM,EAAgBR,EAAWA,YAAC,WAC5BT,EAAea,UAInBb,EAAea,SAAU,EACzBT,GAAe,GAGfc,WAAW,WACTV,EAAK,OAAMW,EAAA,GAAOxB,GACpB,EAAG,KACL,EAAG,CAACA,EAASa,IAEPY,EAAYX,EAAAA,YAChB,SAACY,GACC,IAAIC,EAA6B,KACjC,IACEA,EAAOP,KAAKQ,MAAMF,EAAMG,YAAYF,KACtC,CAAE,MAAOG,GACP,MACF,CACA,GAAKH,EAEL,OAAQA,EAAKZ,MACX,IAAK,sBACHd,EAAO8B,gBAAP9B,EAAO8B,eAAiBJ,EAAKX,SAC7B,MACF,IAAK,UACa,MAAhBf,EAAO+B,WAAP/B,EAAO+B,UAAYL,EAAKX,SACxB,MACF,IAAK,aACHf,MAAAA,EAAOgC,cAAPhC,EAAOgC,aAAeN,EAAKX,SAC3B,MACF,IAAK,gBACHf,EAAOwB,WAAPxB,EAAOwB,UAAYE,EAAKX,SACxB,MACF,IAAK,cACiB,MAApBf,EAAOiC,eAAPjC,EAAOiC,cAAgBP,EAAKX,SAC5B,MACF,IAAK,iBACHf,MAAAA,EAAOkC,iBAAPlC,EAAOkC,kBACP,MACF,IAAK,qBACHlC,EAAOmC,eAAPnC,EAAOmC,gBACP,MACF,IAAK,kBACHnC,MAAAA,EAAOoC,kBAAPpC,EAAOoC,mBACP,MACF,IAAK,gBACHpC,EAAOqC,WAAPrC,EAAOqC,YACP,MACF,IAAK,eACiB,MAApBrC,EAAOsC,eAAPtC,EAAOsC,cAAgBZ,EAAKX,SAC5B,MACF,IAAK,cACHf,MAAAA,EAAOuC,oBAAPvC,EAAOuC,mBAAqBb,EAAKX,SACjC,MACF,IAAK,mBACHf,EAAOwC,0BAAPxC,EAAOwC,yBAA2Bd,EAAKX,SACvC,MACF,IAAK,iBACuC,MAA1Cf,EAAOyC,qCAAPzC,EAAOyC,oCAAsCf,EAAKX,SAClD,MACF,IAAK,YACHf,MAAAA,EAAO0C,mBAAP1C,EAAO0C,kBAAoBhB,EAAKX,SAChC,MAWF,IAAK,cACHf,EAAO2C,SAAP3C,EAAO2C,QAAUjB,EAAKX,SAK5B,EACA,CAACf,IAGH,MAAO,CACLE,WAAAA,EACA0C,aAAc,CACZC,OAAQ,CAAEpC,KAAAA,EAAMqC,QAAS7C,EAAO8C,gBAAkB,qBAClDvB,UAAAA,EACAwB,UAAW3B,EACX4B,mBAAmB,EACnBC,iCAAiC,EACjCC,2BAA2B,EAG3BC,gCAAiC,SAEnC7C,YAAAA,EACA8C,aAAc,WAAF,OAAQzC,EAAK,gBAAgB,EACzC0C,WAAY,kBAAM1C,EAAK,cAAc,EACrC2C,YAAa,SAACC,GAAY,OAAK5C,EAAK,eAAgB,CAAE4C,KAAAA,GAAO,EAC7DC,iBAAkB,kBAAM7C,EAAK,aAAa,EAC1C8C,WAAY,WAAM,OAAA9C,EAAK,cAAc,EACrC+C,YAAa,WAAF,OAAQ/C,EAAK,eAAe,EAE3C,CAOSgD,CAAU7D,EAASC,EAAQC,EACpC,yBAUgB,SAAqB4D,GACnC,IACAC,EAAKD,EAALC,MACAC,EAAYF,EAAZE,aACApD,EAAYkD,EAAZlD,aAGMqD,EAAQlE,EANP+D,EAAP9D,mJAISkE,CAAAJ,EAAAK,GAEsC,CAAEvD,aAAAA,iBAEjD,OACEwD,wBAACC,UAAO7C,GACN8C,IAAKL,EAAM9D,YACP8D,EAAMpB,aACNmB,GACJD,MAAOA,IAGb"}
|
|
1
|
+
{"version":3,"file":"lib.umd.js","sources":["../src/index.tsx"],"sourcesContent":["import { Status as StatusType, Mode as ModeType, MicrophoneStatus as MicrophoneStatusType } from \"@unith-ai/core-client\";\nimport React, { useCallback, useRef, useState } from \"react\";\nimport { ViewStyle } from \"react-native\";\nimport { WebView, WebViewMessageEvent } from \"react-native-webview\";\n\nexport type ConversationOptions = {\n orgId: string;\n headId: string;\n apiKey: string;\n language?: string;\n username?: string;\n microphoneProvider?: \"azure\" | \"eleven_labs\" | \"custom\";\n};\n\nexport type ConversationEvents = {\n onStatusChange?: (prop: { status: string }) => void;\n onConnect?: (prop: { userId: string; headInfo: any; microphoneAccess: boolean }) => void;\n onDisconnect?: (prop: any) => void;\n onMessage?: (prop: any) => void;\n onSuggestions?: (prop: { suggestions: string[] }) => void;\n onTimeoutWarning?: () => void;\n onTimeout?: () => void;\n onMuteStatusChange?: (prop: { isMuted: boolean }) => void;\n onSpeakingStart?: () => void;\n onSpeakingEnd?: () => void;\n onKeepSession?: (prop: { granted: boolean }) => void;\n onError?: (prop: { message: string; endConversation: boolean; type: string }) => void;\n onMicrophoneError?: (prop: { message: string }) => void;\n onMicrophoneStatusChange?: (prop: { status: \"ON\" | \"OFF\" | \"PROCESSING\" }) => void;\n onMicrophoneSpeechRecognitionResult?: (prop: { transcript: string }) => void;\n};\n\nexport type BridgeOptions = {\n webClientUrl?: string;\n webViewBaseUrl?: string;\n};\n\nexport type UseConversationResult = {\n webViewRef: React.RefObject<any>;\n webViewProps: Record<string, any>;\n initialized: boolean;\n startSession: () => void;\n endSession: () => void;\n sendMessage: (text: string) => void;\n toggleMicrophone: () => void;\n toggleMute: () => void;\n keepSession: () => void;\n};\n\ntype BridgeMessage = {\n type: string;\n payload?: any;\n};\n\nconst DEFAULT_WEB_CLIENT_URL =\n \"https://unpkg.com/@unith-ai/core-client@2.0.3-beta.2/dist/lib.web.js\";\n\nfunction buildHtml(webClientUrl: string) {\n return `<!doctype html>\n<html>\n <head>\n <meta charset=\"utf-8\" />\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id=\"root\"></div>\n <script type=\"module\">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => { \n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === \"INIT\") {\n try {\n const { Conversation } = await import(\"${webClientUrl}\");\n\n if (!(\"VideoDecoder\" in window)) {\n send(\"ERROR\", {\n message: \"WebCodecs VideoDecoder is not supported on this device.\",\n type: \"modal\",\n endConversation: true,\n });\n return;\n }\n\n const element = document.getElementById(\"root\");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send(\"STATUS_CHANGE\", data),\n onConnect: (data) => send(\"CONNECT\", data),\n onDisconnect: (data) => send(\"DISCONNECT\", data),\n onMessage: (data) => send(\"MESSAGE\", data),\n onSuggestions: (data) => send(\"SUGGESTIONS\", data),\n onSpeakingStart: () => 
send(\"SPEAKING_START\"),\n onSpeakingEnd: () => send(\"SPEAKING_END\"),\n onTimeoutWarning: () => send(\"TIMEOUT_WARNING\"),\n onTimeout: () => send(\"TIMEOUT\"),\n onKeepSession: (data) => send(\"KEEP_SESSION\", data),\n onMuteStatusChange: (data) => send(\"MUTE_STATUS\", data),\n onError: (data) => send(\"ERROR\", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send(\"MIC_ERROR\", data),\n onMicrophoneStatusChange: (data) => send(\"MIC_STATUS\", data),\n onMicrophoneSpeechRecognitionResult: (data) => send(\"MIC_TRANSCRIPT\", data),\n },\n });\n\n ready = true;\n send(\"READY\");\n\n while (queue.length > 0) {\n const next = queue.shift();\n try {\n await applyMessage(next);\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to process queued message\",\n type: \"notification\",\n endConversation: false,\n });\n }\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to initialize conversation\",\n type: \"modal\",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n try {\n switch (type) {\n case \"START_SESSION\":\n await conversation.startSession();\n break;\n case \"END_SESSION\":\n await conversation.endSession();\n break;\n case \"SEND_MESSAGE\":\n await conversation.sendMessage(payload?.text || \"\");\n break;\n case \"TOGGLE_MIC\":\n await conversation.toggleMicrophone();\n break;\n case \"TOGGLE_MUTE\":\n await conversation.toggleMute();\n break;\n case \"KEEP_SESSION\":\n await conversation.keepSession();\n break;\n default:\n break;\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Operation failed\",\n type: \"notification\",\n endConversation: false,\n });\n }\n };\n\n const handleIncoming = (event) => { \n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== \"INIT\") {\n queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (error) {\n console.error(\"Failed to handle incoming message:\", error);\n }\n };\n\n window.addEventListener(\"message\", handleIncoming);\n document.addEventListener(\"message\", handleIncoming);\n </script>\n </body>\n</html>`;\n}\n\nfunction useBridge(\n options: ConversationOptions,\n events: ConversationEvents = {},\n): UseConversationResult {\n const webViewRef = useRef<any>(null);\n const initializedRef = useRef(false);\n const [initialized, setInitialized] = useState(false);\n\n const html = buildHtml(DEFAULT_WEB_CLIENT_URL)\n\n const post = useCallback((type: string, payload?: any) => {\n const message: BridgeMessage = { type, payload };\n webViewRef.current?.postMessage(JSON.stringify(message));\n }, []);\n\n const handleLoadEnd = useCallback(() => {\n if (initializedRef.current) {\n return;\n }\n\n\n initializedRef.current = true;\n setInitialized(true);\n\n // Add a small delay to ensure WebView is ready to receive messages\n setTimeout(() => {\n post(\"INIT\", { ...options });\n }, 100);\n }, [options, post]);\n\n const onMessage = useCallback(\n (event: WebViewMessageEvent) => {\n let data: BridgeMessage | null = null;\n try {\n data = JSON.parse(event.nativeEvent.data);\n } catch (_) {\n return;\n }\n if (!data) return;\n\n switch (data.type) {\n case \"STATUS_CHANGE\":\n events.onStatusChange?.(data.payload);\n break;\n case \"CONNECT\":\n events.onConnect?.(data.payload);\n break;\n case \"DISCONNECT\":\n events.onDisconnect?.(data.payload);\n break;\n case \"MESSAGE\":\n events.onMessage?.(data.payload);\n break;\n case \"SUGGESTIONS\":\n 
events.onSuggestions?.(data.payload);\n break;\n case \"SPEAKING_START\":\n events.onSpeakingStart?.();\n break;\n case \"SPEAKING_END\":\n events.onSpeakingEnd?.();\n break;\n case \"TIMEOUT_WARNING\":\n events.onTimeoutWarning?.();\n break;\n case \"TIMEOUT\":\n events.onTimeout?.();\n break;\n case \"KEEP_SESSION\":\n events.onKeepSession?.(data.payload);\n break;\n case \"MUTE_STATUS\":\n events.onMuteStatusChange?.(data.payload);\n break;\n case \"MIC_STATUS\":\n events.onMicrophoneStatusChange?.(data.payload);\n break;\n case \"MIC_TRANSCRIPT\":\n events.onMicrophoneSpeechRecognitionResult?.(data.payload);\n break;\n case \"MIC_ERROR\":\n events.onMicrophoneError?.(data.payload);\n break;\n case \"ERROR\":\n events.onError?.(data.payload);\n break;\n default:\n break;\n }\n },\n [events]\n );\n\n return {\n webViewRef,\n webViewProps: {\n source: { html, baseUrl: \"https://unpkg.com\" },\n onMessage,\n onLoadEnd: handleLoadEnd,\n javaScriptEnabled: true,\n mediaPlaybackRequiresUserAction: false,\n allowsInlineMediaPlayback: true,\n allowsFileAccess: true,\n domStorageEnabled: true,\n mediaCapturePermissionGrantType: 'grant',\n },\n initialized,\n startSession: () => post(\"START_SESSION\"),\n endSession: () => post(\"END_SESSION\"),\n sendMessage: (text: string) => post(\"SEND_MESSAGE\", { text }),\n toggleMicrophone: () => post(\"TOGGLE_MIC\"),\n toggleMute: () => post(\"TOGGLE_MUTE\"),\n keepSession: () => post(\"KEEP_SESSION\"),\n };\n}\n\nexport function useConversation(\n options: ConversationOptions,\n events?: ConversationEvents,\n): UseConversationResult {\n return useBridge(options, events,);\n}\n\nexport type UnithConversationViewProps = ConversationEvents & {\n options: ConversationOptions;\n style?: ViewStyle;\n webviewProps?: Record<string, any>;\n webClientUrl?: string;\n};\n\n\nexport type Status = StatusType;\nexport type Mode = ModeType;\nexport type MicrophoneStatus = MicrophoneStatusType;\n\n\nexport function UnithConversationView({\n options,\n style,\n webviewProps,\n webClientUrl,\n ...events\n}: UnithConversationViewProps) {\n const convo = useConversation(options, events);\n\n return (\n <WebView\n ref={convo.webViewRef}\n {...convo.webViewProps}\n {...webviewProps}\n style={style}\n />\n 
);\n}\n"],"names":["_excluded","useConversation","options","events","webViewRef","useRef","initializedRef","_useState","useState","initialized","setInitialized","post","useCallback","type","payload","_webViewRef$current","current","postMessage","JSON","stringify","handleLoadEnd","setTimeout","_extends","onMessage","event","data","parse","nativeEvent","_","onStatusChange","onConnect","onDisconnect","onSuggestions","onSpeakingStart","onSpeakingEnd","onTimeoutWarning","onTimeout","onKeepSession","onMuteStatusChange","onMicrophoneStatusChange","onMicrophoneSpeechRecognitionResult","onMicrophoneError","onError","webViewProps","source","html","baseUrl","onLoadEnd","javaScriptEnabled","mediaPlaybackRequiresUserAction","allowsInlineMediaPlayback","allowsFileAccess","domStorageEnabled","mediaCapturePermissionGrantType","startSession","endSession","sendMessage","text","toggleMicrophone","toggleMute","keepSession","useBridge","_ref","style","webviewProps","convo","_objectWithoutPropertiesLoose","React","WebView","ref"],"mappings":"8oBACA,IAAAA,EAAA,CAAA,UAAA,QAAA,eAAA,gBA0TgB,SAAAC,EACdC,EACAC,GAEA,OAxHF,SACED,EACAC,YAAAA,IAAAA,EAA6B,CAAA,GAE7B,IAAMC,EAAaC,EAAMA,OAAM,MACzBC,EAAiBD,UAAO,GAC9BE,EAAsCC,EAAAA,UAAS,GAAxCC,EAAWF,KAAEG,EAAcH,EAElC,GAEMI,EAAOC,cAAY,SAACC,EAAcC,GAAiB,IAAAC,EAErC,OAAlBA,EAAAX,EAAWY,UAAXD,EAAoBE,YAAYC,KAAKC,UADN,CAAEN,KAAAA,EAAMC,QAAAA,IAEzC,EAAG,IAEGM,EAAgBR,EAAWA,YAAC,WAC5BN,EAAeU,UAKnBV,EAAeU,SAAU,EACzBN,GAAe,GAGfW,WAAW,WACTV,EAAK,OAAMW,EAAA,CAAA,EAAOpB,GACpB,EAAG,KACL,EAAG,CAACA,EAASS,IAEPY,EAAYX,EAAAA,YAChB,SAACY,GACC,IAAIC,EAA6B,KACjC,IACEA,EAAOP,KAAKQ,MAAMF,EAAMG,YAAYF,KACtC,CAAE,MAAOG,GACP,MACF,CACA,GAAKH,EAEL,OAAQA,EAAKZ,MACX,IAAK,sBACHV,EAAO0B,gBAAP1B,EAAO0B,eAAiBJ,EAAKX,SAC7B,MACF,IAAK,gBACHX,EAAO2B,WAAP3B,EAAO2B,UAAYL,EAAKX,SACxB,MACF,IAAK,mBACHX,EAAO4B,cAAP5B,EAAO4B,aAAeN,EAAKX,SAC3B,MACF,IAAK,gBACHX,EAAOoB,WAAPpB,EAAOoB,UAAYE,EAAKX,SACxB,MACF,IAAK,cACiB,MAApBX,EAAO6B,eAAP7B,EAAO6B,cAAgBP,EAAKX,SAC5B,MACF,IAAK,iBACmB,MAAtBX,EAAO8B,iBAAP9B,EAAO8B,kBACP,MACF,IAAK,eACH9B,MAAAA,EAAO+B,eAAP/B,EAAO+B,gBACP,MACF,IAAK,wBACH/B,EAAOgC,kBAAPhC,EAAOgC,mBACP,MACF,IAAK,gBACHhC,EAAOiC,WAAPjC,EAAOiC,YACP,MACF,IAAK,eACiB,MAApBjC,EAAOkC,eAAPlC,EAAOkC,cAAgBZ,EAAKX,SAC5B,MACF,IAAK,cACsB,MAAzBX,EAAOmC,oBAAPnC,EAAOmC,mBAAqBb,EAAKX,SACjC,MACF,IAAK,aAC4B,MAA/BX,EAAOoC,0BAAPpC,EAAOoC,yBAA2Bd,EAAKX,SACvC,MACF,IAAK,iBACuC,MAA1CX,EAAOqC,qCAAPrC,EAAOqC,oCAAsCf,EAAKX,SAClD,MACF,IAAK,YACHX,MAAAA,EAAOsC,mBAAPtC,EAAOsC,kBAAoBhB,EAAKX,SAChC,MACF,IAAK,QACHX,MAAAA,EAAOuC,SAAPvC,EAAOuC,QAAUjB,EAAKX,SAK5B,EACA,CAACX,IAGH,MAAO,CACLC,WAAAA,EACAuC,aAAc,CACZC,OAAQ,CAAEC,ouJAAMC,QAAS,qBACzBvB,UAAAA,EACAwB,UAAW3B,EACX4B,mBAAmB,EACnBC,iCAAiC,EACjCC,2BAA2B,EAC3BC,kBAAkB,EAClBC,mBAAmB,EACnBC,gCAAiC,SAEnC5C,YAAAA,EACA6C,aAAc,WAAF,OAAQ3C,EAAK,gBAAgB,EACzC4C,WAAY,kBAAM5C,EAAK,cAAc,EACrC6C,YAAa,SAACC,UAAiB9C,EAAK,eAAgB,CAAE8C,KAAAA,GAAO,EAC7DC,iBAAkB,WAAF,OAAQ/C,EAAK,aAAa,EAC1CgD,WAAY,kBAAMhD,EAAK,cAAc,EACrCiD,YAAa,WAAM,OAAAjD,EAAK,eAAe,EAE3C,CAMSkD,CAAU3D,EAASC,EAC5B,yBAegB,SAAqB2D,GACnC,IACAC,EAAKD,EAALC,MACAC,EAAYF,EAAZE,aAIMC,EAAQhE,EANP6D,EAAP5D,mJAISgE,CAAAJ,EAAA9D,iBAIT,OACEmE,wBAACC,EAAOA,QAAA9C,EAAA,CACN+C,IAAKJ,EAAM7D,YACP6D,EAAMtB,aACNqB,EAAY,CAChBD,MAAOA,IAGb"}
|
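For reference, the `sourcesContent` embedded in the new map spells out the 0.0.12 surface that this hunk ships: `mode`, `allowWakeLock`, and `fadeTransitionsType` are dropped from `ConversationOptions`, `microphoneProvider` gains a `"custom"` variant, `useConversation` loses its third `BridgeOptions` argument (the bridge now always loads `DEFAULT_WEB_CLIENT_URL` with `baseUrl: "https://unpkg.com"`, and enables `allowsFileAccess` and `domStorageEnabled` unconditionally), and `Status`, `Mode`, and `MicrophoneStatus` are re-exported from `@unith-ai/core-client`. A minimal consumer sketch against that surface follows; the `orgId`/`headId`/`apiKey` values and the component name are placeholders, not anything shipped in the package:

```tsx
// Sketch of a 0.0.12 consumer. Credential values below are placeholders.
import React from "react";
import {
  UnithConversationView,
  type ConversationOptions,
} from "@unith-ai/react-native";

const options: ConversationOptions = {
  orgId: "MY_ORG_ID",   // placeholder
  headId: "MY_HEAD_ID", // placeholder
  apiKey: "MY_API_KEY", // placeholder
  language: "en",       // assumed locale-string format; the type is just `string`
  // "custom" is newly accepted in 0.0.12 alongside "azure" and "eleven_labs".
  microphoneProvider: "custom",
};

export function DigitalHumanScreen() {
  return (
    <UnithConversationView
      options={options}
      style={{ flex: 1 }}
      // ConversationEvents are spread directly onto the view component.
      onStatusChange={({ status }) => console.log("status:", status)}
      onError={({ message, endConversation }) =>
        console.warn("conversation error:", message, { endConversation })
      }
    />
  );
}
```

One detail worth noting from the new source: `UnithConversationView` still declares a `webClientUrl` prop in its type, but the 0.0.12 implementation calls `useConversation(options, events)` without forwarding it, so the prop is effectively inert and the bridge always loads the pinned unpkg client.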