@unith-ai/react-native 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +60 -0
- package/dist/index.d.ts +76 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/lib.js +2 -0
- package/dist/lib.js.map +1 -0
- package/dist/lib.modern.mjs +2 -0
- package/dist/lib.modern.mjs.map +1 -0
- package/dist/lib.module.js +2 -0
- package/dist/lib.module.js.map +1 -0
- package/dist/lib.umd.js +2 -0
- package/dist/lib.umd.js.map +1 -0
- package/package.json +46 -0
package/README.md
ADDED
@@ -0,0 +1,60 @@
# @unith-ai/react-native

React Native WebView wrapper for the Unith AI digital human SDK. This package embeds the existing web SDK inside a WebView and bridges events/commands via postMessage.

## Installation

```sh
npm install @unith-ai/react-native react-native-webview
```

## Usage (component)

```tsx
import { UnithConversationView } from "@unith-ai/react-native";

export default function Screen() {
  return (
    <UnithConversationView
      style={{ height: 420 }}
      options={{
        orgId: "YOUR_ORG_ID",
        headId: "YOUR_HEAD_ID",
        apiKey: "YOUR_API_KEY",
        environment: "production",
        language: "en-US",
        mode: "default",
        username: "React Native User",
        allowWakeLock: true,
        microphoneProvider: "azure",
      }}
      onMessage={(msg) => console.log(msg)}
    />
  );
}
```

## Usage (hook + custom WebView)

```tsx
import { WebView } from "react-native-webview";
import { useConversation } from "@unith-ai/react-native";

export default function Screen() {
  const convo = useConversation({
    orgId: "YOUR_ORG_ID",
    headId: "YOUR_HEAD_ID",
    apiKey: "YOUR_API_KEY",
    environment: "production",
    language: "en-US",
    mode: "default",
    username: "React Native User",
    allowWakeLock: true,
    microphoneProvider: "azure",
  });

  return <WebView ref={convo.webViewRef} {...convo.webViewProps} />;
}
```

## Notes

- The WebView loads the web SDK from `https://unpkg.com` by default. You can override this with `webClientUrl`.
- Audio and mic access require a user gesture. Call `startSession()` from a tap handler (see the sketch after this section).
- WebCodecs must be available in the WebView runtime for VP8 decoding.
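A minimal sketch of that second note — gating `startSession()` behind a tap. Only `useConversation` and its result come from this package; the surrounding `Pressable` wiring is assumed, not part of the published README:

```tsx
import React from "react";
import { Pressable, Text, View } from "react-native";
import { WebView } from "react-native-webview";
import { useConversation } from "@unith-ai/react-native";

export default function Screen() {
  const convo = useConversation({
    orgId: "YOUR_ORG_ID",
    headId: "YOUR_HEAD_ID",
    apiKey: "YOUR_API_KEY",
  });

  return (
    <View style={{ flex: 1 }}>
      <WebView ref={convo.webViewRef} {...convo.webViewProps} style={{ flex: 1 }} />
      {/* startSession() must run inside a user gesture so the WebView
          is allowed to start audio and the microphone. */}
      <Pressable onPress={() => convo.startSession()}>
        <Text>Start conversation</Text>
      </Pressable>
    </View>
  );
}
```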
package/dist/index.d.ts
ADDED
@@ -0,0 +1,76 @@
import React from "react";
import { ViewStyle } from "react-native";
export type ConversationOptions = {
  orgId: string;
  headId: string;
  apiKey: string;
  environment?: string;
  mode?: string;
  language?: string;
  username?: string;
  allowWakeLock?: boolean;
  fadeTransitionsType?: string;
  microphoneProvider?: "azure" | "eleven_labs" | "custom";
};
export type ConversationEvents = {
  onStatusChange?: (prop: { status: string }) => void;
  onConnect?: (prop: { userId: string; headInfo: any; microphoneAccess: boolean }) => void;
  onDisconnect?: (prop: any) => void;
  onMessage?: (prop: any) => void;
  onSuggestions?: (prop: { suggestions: string[] }) => void;
  onTimeoutWarning?: () => void;
  onTimeout?: () => void;
  onMuteStatusChange?: (prop: { isMuted: boolean }) => void;
  onSpeakingStart?: () => void;
  onSpeakingEnd?: () => void;
  onKeepSession?: (prop: { granted: boolean }) => void;
  onError?: (prop: { message: string; endConversation: boolean; type: string }) => void;
  onMicrophoneError?: (prop: { message: string }) => void;
  onMicrophoneStatusChange?: (prop: { status: "ON" | "OFF" | "PROCESSING" }) => void;
  onMicrophoneSpeechRecognitionResult?: (prop: { transcript: string }) => void;
};
export type BridgeOptions = {
  webClientUrl?: string;
};
export type UseConversationResult = {
  webViewRef: React.RefObject<any>;
  webViewProps: Record<string, any>;
  initialized: boolean;
  startSession: () => void;
  endSession: () => void;
  sendMessage: (text: string) => void;
  toggleMicrophone: () => void;
  toggleMute: () => void;
  keepSession: () => void;
};
export declare function useConversation(options: ConversationOptions, events?: ConversationEvents, bridge?: BridgeOptions): UseConversationResult;
export type UnithConversationViewProps = ConversationEvents & {
  options: ConversationOptions;
  style?: ViewStyle;
  webviewProps?: Record<string, any>;
  webClientUrl?: string;
};
export declare function UnithConversationView({ options, style, webviewProps, webClientUrl, ...events }: UnithConversationViewProps): React.JSX.Element;
//# sourceMappingURL=index.d.ts.map
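A sketch of how these declarations compose; only the imported names and the `useConversation(options, events?, bridge?)` signature come from the typings above, while the handler bodies and the override URL are illustrative:

```tsx
import React from "react";
import { WebView } from "react-native-webview";
import { useConversation } from "@unith-ai/react-native";
import type { BridgeOptions, ConversationEvents } from "@unith-ai/react-native";

// Typed handlers; payload shapes are dictated by ConversationEvents.
const events: ConversationEvents = {
  onConnect: ({ userId, microphoneAccess }) =>
    console.log("connected:", userId, microphoneAccess),
  onMicrophoneStatusChange: ({ status }) =>
    console.log("mic:", status), // "ON" | "OFF" | "PROCESSING"
  onError: ({ message, endConversation }) =>
    console.warn(message, { endConversation }),
};

// Optional third argument to useConversation; this URL is a placeholder.
const bridge: BridgeOptions = {
  webClientUrl: "https://example.com/core-client/lib.module.js",
};

export default function TypedScreen() {
  const convo = useConversation(
    { orgId: "YOUR_ORG_ID", headId: "YOUR_HEAD_ID", apiKey: "YOUR_API_KEY" },
    events,
    bridge
  );

  // convo also exposes sendMessage, toggleMicrophone, toggleMute, keepSession.
  return <WebView ref={convo.webViewRef} {...convo.webViewProps} />;
}
```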
package/dist/index.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.tsx"],"names":[],"mappings":"AAAA,OAAO,KAAiD,MAAM,OAAO,CAAC;AACtE,OAAO,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAGzC,MAAM,MAAM,mBAAmB,GAAG;IAChC,KAAK,EAAE,MAAM,CAAC;IACd,MAAM,EAAE,MAAM,CAAC;IACf,MAAM,EAAE,MAAM,CAAC;IACf,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,aAAa,CAAC,EAAE,OAAO,CAAC;IACxB,mBAAmB,CAAC,EAAE,MAAM,CAAC;IAC7B,kBAAkB,CAAC,EAAE,OAAO,GAAG,aAAa,GAAG,QAAQ,CAAC;CACzD,CAAC;AAEF,MAAM,MAAM,kBAAkB,GAAG;IAC/B,cAAc,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,MAAM,EAAE,MAAM,CAAA;KAAE,KAAK,IAAI,CAAC;IACpD,SAAS,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,MAAM,EAAE,MAAM,CAAC;QAAC,QAAQ,EAAE,GAAG,CAAC;QAAC,gBAAgB,EAAE,OAAO,CAAA;KAAE,KAAK,IAAI,CAAC;IACzF,YAAY,CAAC,EAAE,CAAC,IAAI,EAAE,GAAG,KAAK,IAAI,CAAC;IACnC,SAAS,CAAC,EAAE,CAAC,IAAI,EAAE,GAAG,KAAK,IAAI,CAAC;IAChC,aAAa,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,WAAW,EAAE,MAAM,EAAE,CAAA;KAAE,KAAK,IAAI,CAAC;IAC1D,gBAAgB,CAAC,EAAE,MAAM,IAAI,CAAC;IAC9B,SAAS,CAAC,EAAE,MAAM,IAAI,CAAC;IACvB,kBAAkB,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,OAAO,EAAE,OAAO,CAAA;KAAE,KAAK,IAAI,CAAC;IAC1D,eAAe,CAAC,EAAE,MAAM,IAAI,CAAC;IAC7B,aAAa,CAAC,EAAE,MAAM,IAAI,CAAC;IAC3B,aAAa,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,OAAO,EAAE,OAAO,CAAA;KAAE,KAAK,IAAI,CAAC;IACrD,OAAO,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,OAAO,EAAE,MAAM,CAAC;QAAC,eAAe,EAAE,OAAO,CAAC;QAAC,IAAI,EAAE,MAAM,CAAA;KAAE,KAAK,IAAI,CAAC;IACtF,iBAAiB,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,OAAO,EAAE,MAAM,CAAA;KAAE,KAAK,IAAI,CAAC;IACxD,wBAAwB,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,MAAM,EAAE,IAAI,GAAG,KAAK,GAAG,YAAY,CAAA;KAAE,KAAK,IAAI,CAAC;IACnF,mCAAmC,CAAC,EAAE,CAAC,IAAI,EAAE;QAAE,UAAU,EAAE,MAAM,CAAA;KAAE,KAAK,IAAI,CAAC;CAC9E,CAAC;AAEF,MAAM,MAAM,aAAa,GAAG;IAC1B,YAAY,CAAC,EAAE,MAAM,CAAC;CACvB,CAAC;AAEF,MAAM,MAAM,qBAAqB,GAAG;IAClC,UAAU,EAAE,KAAK,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC;IACjC,YAAY,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;IAClC,WAAW,EAAE,OAAO,CAAC;IACrB,YAAY,EAAE,MAAM,IAAI,CAAC;IACzB,UAAU,EAAE,MAAM,IAAI,CAAC;IACvB,WAAW,EAAE,CAAC,IAAI,EAAE,MAAM,KAAK,IAAI,CAAC;IACpC,gBAAgB,EAAE,MAAM,IAAI,CAAC;IAC7B,UAAU,EAAE,MAAM,IAAI,CAAC;IACvB,WAAW,EAAE,MAAM,IAAI,CAAC;CACzB,CAAC;AAoPF,wBAAgB,eAAe,CAC7B,OAAO,EAAE,mBAAmB,EAC5B,MAAM,CAAC,EAAE,kBAAkB,EAC3B,MAAM,CAAC,EAAE,aAAa,GACrB,qBAAqB,CAEvB;AAED,MAAM,MAAM,0BAA0B,GAAG,kBAAkB,GAAG;IAC5D,OAAO,EAAE,mBAAmB,CAAC;IAC7B,KAAK,CAAC,EAAE,SAAS,CAAC;IAClB,YAAY,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;IACnC,YAAY,CAAC,EAAE,MAAM,CAAC;CACvB,CAAC;AAEF,wBAAgB,qBAAqB,CAAC,EACpC,OAAO,EACP,KAAK,EACL,YAAY,EACZ,YAAY,EACZ,GAAG,MAAM,EACV,EAAE,0BAA0B,qBAW5B"}
package/dist/lib.js
ADDED
@@ -0,0 +1,2 @@
var n=require("react"),e=require("react-native-webview");function a(n){return n&&"object"==typeof n&&"default"in n?n:{default:n}}var t=/*#__PURE__*/a(n);function o(){return o=Object.assign?Object.assign.bind():function(n){for(var e=1;e<arguments.length;e++){var a=arguments[e];for(var t in a)({}).hasOwnProperty.call(a,t)&&(n[t]=a[t])}return n},o.apply(null,arguments)}var i=["options","style","webviewProps","webClientUrl"];function s(e,a,t){return function(e,a,t){void 0===a&&(a={}),void 0===t&&(t={});var i=n.useRef(null),s=n.useState(!1),r=s[0],l=s[1],d=n.useMemo(function(){return'<!doctype html>\n<html>\n <head>\n <meta charset="utf-8" />\n <meta name="viewport" content="width=device-width, initial-scale=1" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id="root"></div>\n <script type="module">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => {\n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === "INIT") {\n try {\n const { Conversation } = await import("'+(t.webClientUrl||"https://unpkg.com/@unith-ai/core-client/dist/lib.module.js")+'");\n\n if (!("VideoDecoder" in window)) {\n send("ERROR", {\n message: "WebCodecs VideoDecoder is not supported on this device.",\n type: "modal",\n endConversation: true,\n });\n return;\n }\n\n const element = document.getElementById("root");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send("STATUS_CHANGE", data),\n onConnect: (data) => send("CONNECT", data),\n onDisconnect: (data) => send("DISCONNECT", data),\n onMessage: (data) => send("MESSAGE", data),\n onSuggestions: (data) => send("SUGGESTIONS", data),\n onSpeakingStart: () => send("SPEAKING_START"),\n onSpeakingEnd: () => send("SPEAKING_END"),\n onTimeoutWarning: () => send("TIMEOUT_WARNING"),\n onTimeout: () => send("TIMEOUT"),\n onKeepSession: (data) => send("KEEP_SESSION", data),\n onMuteStatusChange: (data) => send("MUTE_STATUS", data),\n onError: (data) => send("ERROR", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send("MIC_ERROR", data),\n onMicrophoneStatusChange: (data) => send("MIC_STATUS", data),\n onMicrophoneSpeechRecognitionResult: (data) => send("MIC_TRANSCRIPT", data),\n },\n });\n\n ready = true;\n send("READY");\n\n while (queue.length > 0) {\n const next = queue.shift();\n await applyMessage(next);\n }\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Failed to initialize conversation",\n type: "modal",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n switch (type) {\n case "START_SESSION":\n await conversation.startSession();\n break;\n case "END_SESSION":\n await conversation.endSession();\n break;\n case "SEND_MESSAGE":\n await conversation.sendMessage(payload?.text || "");\n break;\n case "TOGGLE_MIC":\n await conversation.toggleMicrophone();\n break;\n case "TOGGLE_MUTE":\n await conversation.toggleMute();\n break;\n case "KEEP_SESSION":\n await conversation.keepSession();\n break;\n default:\n break;\n }\n };\n\n const handleIncoming = (event) => {\n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== "INIT") {\n 
queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (_) {}\n };\n\n window.addEventListener("message", handleIncoming);\n document.addEventListener("message", handleIncoming);\n <\/script>\n </body>\n</html>'},[t.webClientUrl]),c=n.useCallback(function(n,e){var a;null==(a=i.current)||a.postMessage(JSON.stringify({type:n,payload:e}))},[]),u=n.useCallback(function(){r||(c("INIT",o({},e)),l(!0))},[r,e,c]),S=n.useCallback(function(n){var e=null;try{e=JSON.parse(n.nativeEvent.data)}catch(n){return}if(e)switch(e.type){case"STATUS_CHANGE":null==a.onStatusChange||a.onStatusChange(e.payload);break;case"CONNECT":null==a.onConnect||a.onConnect(e.payload);break;case"DISCONNECT":null==a.onDisconnect||a.onDisconnect(e.payload);break;case"MESSAGE":null==a.onMessage||a.onMessage(e.payload);break;case"SUGGESTIONS":null==a.onSuggestions||a.onSuggestions(e.payload);break;case"SPEAKING_START":null==a.onSpeakingStart||a.onSpeakingStart();break;case"SPEAKING_END":null==a.onSpeakingEnd||a.onSpeakingEnd();break;case"TIMEOUT_WARNING":null==a.onTimeoutWarning||a.onTimeoutWarning();break;case"TIMEOUT":null==a.onTimeout||a.onTimeout();break;case"KEEP_SESSION":null==a.onKeepSession||a.onKeepSession(e.payload);break;case"MUTE_STATUS":null==a.onMuteStatusChange||a.onMuteStatusChange(e.payload);break;case"MIC_STATUS":null==a.onMicrophoneStatusChange||a.onMicrophoneStatusChange(e.payload);break;case"MIC_TRANSCRIPT":null==a.onMicrophoneSpeechRecognitionResult||a.onMicrophoneSpeechRecognitionResult(e.payload);break;case"MIC_ERROR":null==a.onMicrophoneError||a.onMicrophoneError(e.payload);break;case"ERROR":null==a.onError||a.onError(e.payload)}},[a]);return{webViewRef:i,webViewProps:{source:{html:d},onMessage:S,onLoadEnd:u,javaScriptEnabled:!0,mediaPlaybackRequiresUserAction:!0,allowsInlineMediaPlayback:!0},initialized:r,startSession:function(){return c("START_SESSION")},endSession:function(){return c("END_SESSION")},sendMessage:function(n){return c("SEND_MESSAGE",{text:n})},toggleMicrophone:function(){return c("TOGGLE_MIC")},toggleMute:function(){return c("TOGGLE_MUTE")},keepSession:function(){return c("KEEP_SESSION")}}}(e,a,t)}exports.UnithConversationView=function(n){var a=n.style,r=n.webviewProps,l=n.webClientUrl,d=s(n.options,function(n,e){if(null==n)return{};var a={};for(var t in n)if({}.hasOwnProperty.call(n,t)){if(-1!==e.indexOf(t))continue;a[t]=n[t]}return a}(n,i),{webClientUrl:l});/*#__PURE__*/return t.default.createElement(e.WebView,o({ref:d.webViewRef},d.webViewProps,r,{style:a}))},exports.useConversation=s;
//# sourceMappingURL=lib.js.map
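For orientation, a summary of the bridge protocol reconstructed from the string literals in the bundle above (a reading aid, not an authoritative spec): both sides exchange a small JSON envelope over postMessage.

```ts
// Envelope serialized with JSON.stringify on both sides of the bridge.
type BridgeMessage = { type: string; payload?: any };

// Host -> WebView commands handled by applyMessage:
//   "INIT", "START_SESSION", "END_SESSION",
//   "SEND_MESSAGE" (payload: { text }), "TOGGLE_MIC", "TOGGLE_MUTE", "KEEP_SESSION"
// Non-INIT commands arriving before the web SDK is ready are queued and replayed.

// WebView -> host events emitted via send(type, payload):
//   "READY", "STATUS_CHANGE", "CONNECT", "DISCONNECT", "MESSAGE", "SUGGESTIONS",
//   "SPEAKING_START", "SPEAKING_END", "TIMEOUT_WARNING", "TIMEOUT", "KEEP_SESSION",
//   "MUTE_STATUS", "ERROR", "MIC_ERROR", "MIC_STATUS", "MIC_TRANSCRIPT"
```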
package/dist/lib.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"lib.js","sources":["../src/index.tsx"],"sourcesContent":["import React, { useCallback, useMemo, useRef, useState } from \"react\";\nimport { ViewStyle } from \"react-native\";\nimport { WebView, WebViewMessageEvent } from \"react-native-webview\";\n\nexport type ConversationOptions = {\n orgId: string;\n headId: string;\n apiKey: string;\n environment?: string; \n mode?: string;\n language?: string;\n username?: string;\n allowWakeLock?: boolean;\n fadeTransitionsType?: string;\n microphoneProvider?: \"azure\" | \"eleven_labs\" | \"custom\";\n};\n\nexport type ConversationEvents = {\n onStatusChange?: (prop: { status: string }) => void;\n onConnect?: (prop: { userId: string; headInfo: any; microphoneAccess: boolean }) => void;\n onDisconnect?: (prop: any) => void;\n onMessage?: (prop: any) => void;\n onSuggestions?: (prop: { suggestions: string[] }) => void;\n onTimeoutWarning?: () => void;\n onTimeout?: () => void;\n onMuteStatusChange?: (prop: { isMuted: boolean }) => void;\n onSpeakingStart?: () => void;\n onSpeakingEnd?: () => void;\n onKeepSession?: (prop: { granted: boolean }) => void;\n onError?: (prop: { message: string; endConversation: boolean; type: string }) => void;\n onMicrophoneError?: (prop: { message: string }) => void;\n onMicrophoneStatusChange?: (prop: { status: \"ON\" | \"OFF\" | \"PROCESSING\" }) => void;\n onMicrophoneSpeechRecognitionResult?: (prop: { transcript: string }) => void;\n};\n\nexport type BridgeOptions = {\n webClientUrl?: string;\n};\n\nexport type UseConversationResult = {\n webViewRef: React.RefObject<any>;\n webViewProps: Record<string, any>;\n initialized: boolean;\n startSession: () => void;\n endSession: () => void;\n sendMessage: (text: string) => void;\n toggleMicrophone: () => void;\n toggleMute: () => void;\n keepSession: () => void;\n};\n\ntype BridgeMessage = {\n type: string;\n payload?: any;\n};\n\nconst DEFAULT_WEB_CLIENT_URL =\n \"https://unpkg.com/@unith-ai/core-client/dist/lib.module.js\";\n\nfunction buildHtml(webClientUrl: string) {\n return `<!doctype html>\n<html>\n <head>\n <meta charset=\"utf-8\" />\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id=\"root\"></div>\n <script type=\"module\">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => {\n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === \"INIT\") {\n try {\n const { Conversation } = await import(\"${webClientUrl}\");\n\n if (!(\"VideoDecoder\" in window)) {\n send(\"ERROR\", {\n message: \"WebCodecs VideoDecoder is not supported on this device.\",\n type: \"modal\",\n endConversation: true,\n });\n return;\n }\n\n const element = document.getElementById(\"root\");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send(\"STATUS_CHANGE\", data),\n onConnect: (data) => send(\"CONNECT\", data),\n onDisconnect: (data) => send(\"DISCONNECT\", data),\n onMessage: (data) => send(\"MESSAGE\", data),\n onSuggestions: (data) => send(\"SUGGESTIONS\", data),\n onSpeakingStart: () => send(\"SPEAKING_START\"),\n onSpeakingEnd: () => 
send(\"SPEAKING_END\"),\n onTimeoutWarning: () => send(\"TIMEOUT_WARNING\"),\n onTimeout: () => send(\"TIMEOUT\"),\n onKeepSession: (data) => send(\"KEEP_SESSION\", data),\n onMuteStatusChange: (data) => send(\"MUTE_STATUS\", data),\n onError: (data) => send(\"ERROR\", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send(\"MIC_ERROR\", data),\n onMicrophoneStatusChange: (data) => send(\"MIC_STATUS\", data),\n onMicrophoneSpeechRecognitionResult: (data) => send(\"MIC_TRANSCRIPT\", data),\n },\n });\n\n ready = true;\n send(\"READY\");\n\n while (queue.length > 0) {\n const next = queue.shift();\n await applyMessage(next);\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to initialize conversation\",\n type: \"modal\",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n switch (type) {\n case \"START_SESSION\":\n await conversation.startSession();\n break;\n case \"END_SESSION\":\n await conversation.endSession();\n break;\n case \"SEND_MESSAGE\":\n await conversation.sendMessage(payload?.text || \"\");\n break;\n case \"TOGGLE_MIC\":\n await conversation.toggleMicrophone();\n break;\n case \"TOGGLE_MUTE\":\n await conversation.toggleMute();\n break;\n case \"KEEP_SESSION\":\n await conversation.keepSession();\n break;\n default:\n break;\n }\n };\n\n const handleIncoming = (event) => {\n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== \"INIT\") {\n queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (_) {}\n };\n\n window.addEventListener(\"message\", handleIncoming);\n document.addEventListener(\"message\", handleIncoming);\n </script>\n </body>\n</html>`;\n}\n\nfunction useBridge(\n options: ConversationOptions,\n events: ConversationEvents = {},\n bridge: BridgeOptions = {}\n): UseConversationResult {\n const webViewRef = useRef<any>(null);\n const [initialized, setInitialized] = useState(false);\n\n const html = useMemo(\n () => buildHtml(bridge.webClientUrl || DEFAULT_WEB_CLIENT_URL),\n [bridge.webClientUrl]\n );\n\n const post = useCallback((type: string, payload?: any) => {\n const message: BridgeMessage = { type, payload };\n webViewRef.current?.postMessage(JSON.stringify(message));\n }, []);\n\n const init = useCallback(() => {\n if (initialized) return;\n post(\"INIT\", {\n ...options,\n });\n setInitialized(true);\n }, [initialized, options, post]);\n\n const onMessage = useCallback(\n (event: WebViewMessageEvent) => {\n let data: BridgeMessage | null = null;\n try {\n data = JSON.parse(event.nativeEvent.data);\n } catch (_) {\n return;\n }\n if (!data) return;\n\n switch (data.type) {\n case \"STATUS_CHANGE\":\n events.onStatusChange?.(data.payload);\n break;\n case \"CONNECT\":\n events.onConnect?.(data.payload);\n break;\n case \"DISCONNECT\":\n events.onDisconnect?.(data.payload);\n break;\n case \"MESSAGE\":\n events.onMessage?.(data.payload);\n break;\n case \"SUGGESTIONS\":\n events.onSuggestions?.(data.payload);\n break;\n case \"SPEAKING_START\":\n events.onSpeakingStart?.();\n break;\n case \"SPEAKING_END\":\n events.onSpeakingEnd?.();\n break;\n case \"TIMEOUT_WARNING\":\n events.onTimeoutWarning?.();\n break;\n case \"TIMEOUT\":\n events.onTimeout?.();\n break;\n case \"KEEP_SESSION\":\n events.onKeepSession?.(data.payload);\n break;\n case \"MUTE_STATUS\":\n events.onMuteStatusChange?.(data.payload);\n break;\n case \"MIC_STATUS\":\n events.onMicrophoneStatusChange?.(data.payload);\n break;\n case \"MIC_TRANSCRIPT\":\n 
events.onMicrophoneSpeechRecognitionResult?.(data.payload);\n break;\n case \"MIC_ERROR\":\n events.onMicrophoneError?.(data.payload);\n break;\n case \"ERROR\":\n events.onError?.(data.payload);\n break;\n default:\n break;\n }\n },\n [events]\n );\n\n return {\n webViewRef,\n webViewProps: {\n source: { html },\n onMessage,\n onLoadEnd: init,\n javaScriptEnabled: true,\n mediaPlaybackRequiresUserAction: true,\n allowsInlineMediaPlayback: true,\n },\n initialized,\n startSession: () => post(\"START_SESSION\"),\n endSession: () => post(\"END_SESSION\"),\n sendMessage: (text: string) => post(\"SEND_MESSAGE\", { text }),\n toggleMicrophone: () => post(\"TOGGLE_MIC\"),\n toggleMute: () => post(\"TOGGLE_MUTE\"),\n keepSession: () => post(\"KEEP_SESSION\"),\n };\n}\n\nexport function useConversation(\n options: ConversationOptions,\n events?: ConversationEvents,\n bridge?: BridgeOptions\n): UseConversationResult {\n return useBridge(options, events, bridge);\n}\n\nexport type UnithConversationViewProps = ConversationEvents & {\n options: ConversationOptions;\n style?: ViewStyle;\n webviewProps?: Record<string, any>;\n webClientUrl?: string;\n};\n\nexport function UnithConversationView({\n options,\n style,\n webviewProps,\n webClientUrl,\n ...events\n}: UnithConversationViewProps) {\n const convo = useConversation(options, events, { webClientUrl });\n\n return (\n <WebView\n ref={convo.webViewRef}\n {...convo.webViewProps}\n {...webviewProps}\n style={style}\n />\n );\n}\n"],"names":["_excluded","useConversation","options","events","bridge","webViewRef","useRef","_useState","useState","initialized","setInitialized","html","useMemo","webClientUrl","post","useCallback","type","payload","_webViewRef$current","current","postMessage","JSON","stringify","init","_extends","onMessage","event","data","parse","nativeEvent","_","onStatusChange","onConnect","onDisconnect","onSuggestions","onSpeakingStart","onSpeakingEnd","onTimeoutWarning","onTimeout","onKeepSession","onMuteStatusChange","onMicrophoneStatusChange","onMicrophoneSpeechRecognitionResult","onMicrophoneError","onError","webViewProps","source","onLoadEnd","javaScriptEnabled","mediaPlaybackRequiresUserAction","allowsInlineMediaPlayback","startSession","endSession","sendMessage","text","toggleMicrophone","toggleMute","keepSession","useBridge","_ref","style","webviewProps","convo","_objectWithoutPropertiesLoose","React","WebView","ref"],"mappings":"iXAAA,IAAAA,EAAA,CAAA,UAAA,QAAA,eAAA,yBAqSgBC,EACdC,EACAC,EACAC,GAEA,OAlHF,SACEF,EACAC,EACAC,QADAD,IAAAA,IAAAA,EAA6B,CAAE,YAC/BC,IAAAA,EAAwB,IAExB,IAAMC,EAAaC,EAAAA,OAAY,MAC/BC,EAAsCC,EAAQA,UAAC,GAAxCC,EAAWF,EAAA,GAAEG,EAAcH,KAE5BI,EAAOC,EAAAA,QACX,WAAA,MArIF,s5BAqIkBR,EAAOS,cAxIzB,8DA6HF,+nGAWkE,EAC9D,CAACT,EAAOS,eAGJC,EAAOC,cAAY,SAACC,EAAcC,GAAiB,IAAAC,EAErC,OAAlBA,EAAAb,EAAWc,UAAXD,EAAoBE,YAAYC,KAAKC,UADN,CAAEN,KAAAA,EAAMC,QAAAA,IAEzC,EAAG,IAEGM,EAAOR,EAAWA,YAAC,WACnBN,IACJK,EAAK,OAAMU,EACNtB,GAAAA,IAELQ,GAAe,GACjB,EAAG,CAACD,EAAaP,EAASY,IAEpBW,EAAYV,cAChB,SAACW,GACC,IAAIC,EAA6B,KACjC,IACEA,EAAON,KAAKO,MAAMF,EAAMG,YAAYF,KACtC,CAAE,MAAOG,GACP,MACF,CACA,GAAKH,EAEL,OAAQA,EAAKX,MACX,IAAK,gBACkB,MAArBb,EAAO4B,gBAAP5B,EAAO4B,eAAiBJ,EAAKV,SAC7B,MACF,IAAK,UACa,MAAhBd,EAAO6B,WAAP7B,EAAO6B,UAAYL,EAAKV,SACxB,MACF,IAAK,aACgB,MAAnBd,EAAO8B,cAAP9B,EAAO8B,aAAeN,EAAKV,SAC3B,MACF,IAAK,UACa,MAAhBd,EAAOsB,WAAPtB,EAAOsB,UAAYE,EAAKV,SACxB,MACF,IAAK,cACHd,MAAAA,EAAO+B,eAAP/B,EAAO+B,cAAgBP,EAAKV,SAC5B,MACF,IAAK,iBACHd,MAAAA,EAAOgC,iBAAPhC,EAAOgC,kBACP,MACF,IAAK,eACHhC,MAAAA,EAAOiC,eAAPjC,EAAOiC,gBACP,MACF,IAAK,wBACHjC,EAAOkC
,kBAAPlC,EAAOkC,mBACP,MACF,IAAK,gBACHlC,EAAOmC,WAAPnC,EAAOmC,YACP,MACF,IAAK,qBACHnC,EAAOoC,eAAPpC,EAAOoC,cAAgBZ,EAAKV,SAC5B,MACF,IAAK,cACsB,MAAzBd,EAAOqC,oBAAPrC,EAAOqC,mBAAqBb,EAAKV,SACjC,MACF,IAAK,aAC4B,MAA/Bd,EAAOsC,0BAAPtC,EAAOsC,yBAA2Bd,EAAKV,SACvC,MACF,IAAK,iBACuC,MAA1Cd,EAAOuC,qCAAPvC,EAAOuC,oCAAsCf,EAAKV,SAClD,MACF,IAAK,YACqB,MAAxBd,EAAOwC,mBAAPxC,EAAOwC,kBAAoBhB,EAAKV,SAChC,MACF,IAAK,QACHd,MAAAA,EAAOyC,SAAPzC,EAAOyC,QAAUjB,EAAKV,SAK5B,EACA,CAACd,IAGH,MAAO,CACLE,WAAAA,EACAwC,aAAc,CACZC,OAAQ,CAAEnC,KAAAA,GACVc,UAAAA,EACAsB,UAAWxB,EACXyB,mBAAmB,EACnBC,iCAAiC,EACjCC,2BAA2B,GAE7BzC,YAAAA,EACA0C,aAAc,WAAM,OAAArC,EAAK,gBAAgB,EACzCsC,WAAY,kBAAMtC,EAAK,cAAc,EACrCuC,YAAa,SAACC,GAAY,OAAKxC,EAAK,eAAgB,CAAEwC,KAAAA,GAAO,EAC7DC,iBAAkB,WAAM,OAAAzC,EAAK,aAAa,EAC1C0C,WAAY,kBAAM1C,EAAK,cAAc,EACrC2C,YAAa,WAAM,OAAA3C,EAAK,eAAe,EAE3C,CAOS4C,CAAUxD,EAASC,EAAQC,EACpC,wCASqCuD,OAEnCC,EAAKD,EAALC,MACAC,EAAYF,EAAZE,aACAhD,EAAY8C,EAAZ9C,aAGMiD,EAAQ7D,EANP0D,EAAPzD,mJAIS6D,CAAAJ,EAAA3D,GAEsC,CAAEa,aAAAA,iBAEjD,OACEmD,wBAACC,EAAOA,QAAAzC,EAAA,CACN0C,IAAKJ,EAAMzD,YACPyD,EAAMjB,aACNgB,EAAY,CAChBD,MAAOA,IAGb"}
package/dist/lib.modern.mjs
ADDED
@@ -0,0 +1,2 @@
import n,{useRef as e,useState as a,useMemo as t,useCallback as o}from"react";import{WebView as s}from"react-native-webview";function i(){return i=Object.assign?Object.assign.bind():function(n){for(var e=1;e<arguments.length;e++){var a=arguments[e];for(var t in a)({}).hasOwnProperty.call(a,t)&&(n[t]=a[t])}return n},i.apply(null,arguments)}const r=["options","style","webviewProps","webClientUrl"];function l(n,s,r){return function(n,s={},r={}){const l=e(null),[d,c]=a(!1),S=t(()=>`<!doctype html>\n<html>\n <head>\n <meta charset="utf-8" />\n <meta name="viewport" content="width=device-width, initial-scale=1" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id="root"></div>\n <script type="module">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => {\n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === "INIT") {\n try {\n const { Conversation } = await import("${r.webClientUrl||"https://unpkg.com/@unith-ai/core-client/dist/lib.module.js"}");\n\n if (!("VideoDecoder" in window)) {\n send("ERROR", {\n message: "WebCodecs VideoDecoder is not supported on this device.",\n type: "modal",\n endConversation: true,\n });\n return;\n }\n\n const element = document.getElementById("root");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send("STATUS_CHANGE", data),\n onConnect: (data) => send("CONNECT", data),\n onDisconnect: (data) => send("DISCONNECT", data),\n onMessage: (data) => send("MESSAGE", data),\n onSuggestions: (data) => send("SUGGESTIONS", data),\n onSpeakingStart: () => send("SPEAKING_START"),\n onSpeakingEnd: () => send("SPEAKING_END"),\n onTimeoutWarning: () => send("TIMEOUT_WARNING"),\n onTimeout: () => send("TIMEOUT"),\n onKeepSession: (data) => send("KEEP_SESSION", data),\n onMuteStatusChange: (data) => send("MUTE_STATUS", data),\n onError: (data) => send("ERROR", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send("MIC_ERROR", data),\n onMicrophoneStatusChange: (data) => send("MIC_STATUS", data),\n onMicrophoneSpeechRecognitionResult: (data) => send("MIC_TRANSCRIPT", data),\n },\n });\n\n ready = true;\n send("READY");\n\n while (queue.length > 0) {\n const next = queue.shift();\n await applyMessage(next);\n }\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Failed to initialize conversation",\n type: "modal",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n switch (type) {\n case "START_SESSION":\n await conversation.startSession();\n break;\n case "END_SESSION":\n await conversation.endSession();\n break;\n case "SEND_MESSAGE":\n await conversation.sendMessage(payload?.text || "");\n break;\n case "TOGGLE_MIC":\n await conversation.toggleMicrophone();\n break;\n case "TOGGLE_MUTE":\n await conversation.toggleMute();\n break;\n case "KEEP_SESSION":\n await conversation.keepSession();\n break;\n default:\n break;\n }\n };\n\n const handleIncoming = (event) => {\n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== "INIT") {\n queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (_) {}\n };\n\n 
window.addEventListener("message", handleIncoming);\n document.addEventListener("message", handleIncoming);\n <\/script>\n </body>\n</html>`,[r.webClientUrl]),u=o((n,e)=>{var a;null==(a=l.current)||a.postMessage(JSON.stringify({type:n,payload:e}))},[]),p=o(()=>{d||(u("INIT",i({},n)),c(!0))},[d,n,u]),E=o(n=>{let e=null;try{e=JSON.parse(n.nativeEvent.data)}catch(n){return}if(e)switch(e.type){case"STATUS_CHANGE":null==s.onStatusChange||s.onStatusChange(e.payload);break;case"CONNECT":null==s.onConnect||s.onConnect(e.payload);break;case"DISCONNECT":null==s.onDisconnect||s.onDisconnect(e.payload);break;case"MESSAGE":null==s.onMessage||s.onMessage(e.payload);break;case"SUGGESTIONS":null==s.onSuggestions||s.onSuggestions(e.payload);break;case"SPEAKING_START":null==s.onSpeakingStart||s.onSpeakingStart();break;case"SPEAKING_END":null==s.onSpeakingEnd||s.onSpeakingEnd();break;case"TIMEOUT_WARNING":null==s.onTimeoutWarning||s.onTimeoutWarning();break;case"TIMEOUT":null==s.onTimeout||s.onTimeout();break;case"KEEP_SESSION":null==s.onKeepSession||s.onKeepSession(e.payload);break;case"MUTE_STATUS":null==s.onMuteStatusChange||s.onMuteStatusChange(e.payload);break;case"MIC_STATUS":null==s.onMicrophoneStatusChange||s.onMicrophoneStatusChange(e.payload);break;case"MIC_TRANSCRIPT":null==s.onMicrophoneSpeechRecognitionResult||s.onMicrophoneSpeechRecognitionResult(e.payload);break;case"MIC_ERROR":null==s.onMicrophoneError||s.onMicrophoneError(e.payload);break;case"ERROR":null==s.onError||s.onError(e.payload)}},[s]);return{webViewRef:l,webViewProps:{source:{html:S},onMessage:E,onLoadEnd:p,javaScriptEnabled:!0,mediaPlaybackRequiresUserAction:!0,allowsInlineMediaPlayback:!0},initialized:d,startSession:()=>u("START_SESSION"),endSession:()=>u("END_SESSION"),sendMessage:n=>u("SEND_MESSAGE",{text:n}),toggleMicrophone:()=>u("TOGGLE_MIC"),toggleMute:()=>u("TOGGLE_MUTE"),keepSession:()=>u("KEEP_SESSION")}}(n,s,r)}function d(e){let{options:a,style:t,webviewProps:o,webClientUrl:d}=e;const c=l(a,function(n,e){if(null==n)return{};var a={};for(var t in n)if({}.hasOwnProperty.call(n,t)){if(-1!==e.indexOf(t))continue;a[t]=n[t]}return a}(e,r),{webClientUrl:d});/*#__PURE__*/return n.createElement(s,i({ref:c.webViewRef},c.webViewProps,o,{style:t}))}export{d as UnithConversationView,l as useConversation};
//# sourceMappingURL=lib.modern.mjs.map
package/dist/lib.modern.mjs.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"lib.modern.mjs","sources":["../src/index.tsx"],"sourcesContent":["import React, { useCallback, useMemo, useRef, useState } from \"react\";\nimport { ViewStyle } from \"react-native\";\nimport { WebView, WebViewMessageEvent } from \"react-native-webview\";\n\nexport type ConversationOptions = {\n orgId: string;\n headId: string;\n apiKey: string;\n environment?: string; \n mode?: string;\n language?: string;\n username?: string;\n allowWakeLock?: boolean;\n fadeTransitionsType?: string;\n microphoneProvider?: \"azure\" | \"eleven_labs\" | \"custom\";\n};\n\nexport type ConversationEvents = {\n onStatusChange?: (prop: { status: string }) => void;\n onConnect?: (prop: { userId: string; headInfo: any; microphoneAccess: boolean }) => void;\n onDisconnect?: (prop: any) => void;\n onMessage?: (prop: any) => void;\n onSuggestions?: (prop: { suggestions: string[] }) => void;\n onTimeoutWarning?: () => void;\n onTimeout?: () => void;\n onMuteStatusChange?: (prop: { isMuted: boolean }) => void;\n onSpeakingStart?: () => void;\n onSpeakingEnd?: () => void;\n onKeepSession?: (prop: { granted: boolean }) => void;\n onError?: (prop: { message: string; endConversation: boolean; type: string }) => void;\n onMicrophoneError?: (prop: { message: string }) => void;\n onMicrophoneStatusChange?: (prop: { status: \"ON\" | \"OFF\" | \"PROCESSING\" }) => void;\n onMicrophoneSpeechRecognitionResult?: (prop: { transcript: string }) => void;\n};\n\nexport type BridgeOptions = {\n webClientUrl?: string;\n};\n\nexport type UseConversationResult = {\n webViewRef: React.RefObject<any>;\n webViewProps: Record<string, any>;\n initialized: boolean;\n startSession: () => void;\n endSession: () => void;\n sendMessage: (text: string) => void;\n toggleMicrophone: () => void;\n toggleMute: () => void;\n keepSession: () => void;\n};\n\ntype BridgeMessage = {\n type: string;\n payload?: any;\n};\n\nconst DEFAULT_WEB_CLIENT_URL =\n \"https://unpkg.com/@unith-ai/core-client/dist/lib.module.js\";\n\nfunction buildHtml(webClientUrl: string) {\n return `<!doctype html>\n<html>\n <head>\n <meta charset=\"utf-8\" />\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id=\"root\"></div>\n <script type=\"module\">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => {\n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === \"INIT\") {\n try {\n const { Conversation } = await import(\"${webClientUrl}\");\n\n if (!(\"VideoDecoder\" in window)) {\n send(\"ERROR\", {\n message: \"WebCodecs VideoDecoder is not supported on this device.\",\n type: \"modal\",\n endConversation: true,\n });\n return;\n }\n\n const element = document.getElementById(\"root\");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send(\"STATUS_CHANGE\", data),\n onConnect: (data) => send(\"CONNECT\", data),\n onDisconnect: (data) => send(\"DISCONNECT\", data),\n onMessage: (data) => send(\"MESSAGE\", data),\n onSuggestions: (data) => send(\"SUGGESTIONS\", data),\n onSpeakingStart: () => send(\"SPEAKING_START\"),\n onSpeakingEnd: () 
=> send(\"SPEAKING_END\"),\n onTimeoutWarning: () => send(\"TIMEOUT_WARNING\"),\n onTimeout: () => send(\"TIMEOUT\"),\n onKeepSession: (data) => send(\"KEEP_SESSION\", data),\n onMuteStatusChange: (data) => send(\"MUTE_STATUS\", data),\n onError: (data) => send(\"ERROR\", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send(\"MIC_ERROR\", data),\n onMicrophoneStatusChange: (data) => send(\"MIC_STATUS\", data),\n onMicrophoneSpeechRecognitionResult: (data) => send(\"MIC_TRANSCRIPT\", data),\n },\n });\n\n ready = true;\n send(\"READY\");\n\n while (queue.length > 0) {\n const next = queue.shift();\n await applyMessage(next);\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to initialize conversation\",\n type: \"modal\",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n switch (type) {\n case \"START_SESSION\":\n await conversation.startSession();\n break;\n case \"END_SESSION\":\n await conversation.endSession();\n break;\n case \"SEND_MESSAGE\":\n await conversation.sendMessage(payload?.text || \"\");\n break;\n case \"TOGGLE_MIC\":\n await conversation.toggleMicrophone();\n break;\n case \"TOGGLE_MUTE\":\n await conversation.toggleMute();\n break;\n case \"KEEP_SESSION\":\n await conversation.keepSession();\n break;\n default:\n break;\n }\n };\n\n const handleIncoming = (event) => {\n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== \"INIT\") {\n queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (_) {}\n };\n\n window.addEventListener(\"message\", handleIncoming);\n document.addEventListener(\"message\", handleIncoming);\n </script>\n </body>\n</html>`;\n}\n\nfunction useBridge(\n options: ConversationOptions,\n events: ConversationEvents = {},\n bridge: BridgeOptions = {}\n): UseConversationResult {\n const webViewRef = useRef<any>(null);\n const [initialized, setInitialized] = useState(false);\n\n const html = useMemo(\n () => buildHtml(bridge.webClientUrl || DEFAULT_WEB_CLIENT_URL),\n [bridge.webClientUrl]\n );\n\n const post = useCallback((type: string, payload?: any) => {\n const message: BridgeMessage = { type, payload };\n webViewRef.current?.postMessage(JSON.stringify(message));\n }, []);\n\n const init = useCallback(() => {\n if (initialized) return;\n post(\"INIT\", {\n ...options,\n });\n setInitialized(true);\n }, [initialized, options, post]);\n\n const onMessage = useCallback(\n (event: WebViewMessageEvent) => {\n let data: BridgeMessage | null = null;\n try {\n data = JSON.parse(event.nativeEvent.data);\n } catch (_) {\n return;\n }\n if (!data) return;\n\n switch (data.type) {\n case \"STATUS_CHANGE\":\n events.onStatusChange?.(data.payload);\n break;\n case \"CONNECT\":\n events.onConnect?.(data.payload);\n break;\n case \"DISCONNECT\":\n events.onDisconnect?.(data.payload);\n break;\n case \"MESSAGE\":\n events.onMessage?.(data.payload);\n break;\n case \"SUGGESTIONS\":\n events.onSuggestions?.(data.payload);\n break;\n case \"SPEAKING_START\":\n events.onSpeakingStart?.();\n break;\n case \"SPEAKING_END\":\n events.onSpeakingEnd?.();\n break;\n case \"TIMEOUT_WARNING\":\n events.onTimeoutWarning?.();\n break;\n case \"TIMEOUT\":\n events.onTimeout?.();\n break;\n case \"KEEP_SESSION\":\n events.onKeepSession?.(data.payload);\n break;\n case \"MUTE_STATUS\":\n events.onMuteStatusChange?.(data.payload);\n break;\n case \"MIC_STATUS\":\n events.onMicrophoneStatusChange?.(data.payload);\n break;\n case \"MIC_TRANSCRIPT\":\n 
events.onMicrophoneSpeechRecognitionResult?.(data.payload);\n break;\n case \"MIC_ERROR\":\n events.onMicrophoneError?.(data.payload);\n break;\n case \"ERROR\":\n events.onError?.(data.payload);\n break;\n default:\n break;\n }\n },\n [events]\n );\n\n return {\n webViewRef,\n webViewProps: {\n source: { html },\n onMessage,\n onLoadEnd: init,\n javaScriptEnabled: true,\n mediaPlaybackRequiresUserAction: true,\n allowsInlineMediaPlayback: true,\n },\n initialized,\n startSession: () => post(\"START_SESSION\"),\n endSession: () => post(\"END_SESSION\"),\n sendMessage: (text: string) => post(\"SEND_MESSAGE\", { text }),\n toggleMicrophone: () => post(\"TOGGLE_MIC\"),\n toggleMute: () => post(\"TOGGLE_MUTE\"),\n keepSession: () => post(\"KEEP_SESSION\"),\n };\n}\n\nexport function useConversation(\n options: ConversationOptions,\n events?: ConversationEvents,\n bridge?: BridgeOptions\n): UseConversationResult {\n return useBridge(options, events, bridge);\n}\n\nexport type UnithConversationViewProps = ConversationEvents & {\n options: ConversationOptions;\n style?: ViewStyle;\n webviewProps?: Record<string, any>;\n webClientUrl?: string;\n};\n\nexport function UnithConversationView({\n options,\n style,\n webviewProps,\n webClientUrl,\n ...events\n}: UnithConversationViewProps) {\n const convo = useConversation(options, events, { webClientUrl });\n\n return (\n <WebView\n ref={convo.webViewRef}\n {...convo.webViewProps}\n {...webviewProps}\n style={style}\n />\n );\n}\n"],"names":["_excluded","useConversation","options","events","bridge","webViewRef","useRef","initialized","setInitialized","useState","html","useMemo","webClientUrl","post","useCallback","type","payload","_webViewRef$current","current","postMessage","JSON","stringify","init","_extends","onMessage","event","data","parse","nativeEvent","_","onStatusChange","onConnect","onDisconnect","onSuggestions","onSpeakingStart","onSpeakingEnd","onTimeoutWarning","onTimeout","onKeepSession","onMuteStatusChange","onMicrophoneStatusChange","onMicrophoneSpeechRecognitionResult","onMicrophoneError","onError","webViewProps","source","onLoadEnd","javaScriptEnabled","mediaPlaybackRequiresUserAction","allowsInlineMediaPlayback","startSession","endSession","sendMessage","text","toggleMicrophone","toggleMute","keepSession","useBridge","UnithConversationView","_ref","style","webviewProps","convo","_objectWithoutPropertiesLoose","React","WebView","ref"],"mappings":"qVAAA,MAAAA,EAAA,CAAA,UAAA,QAAA,eAAA,gBAqSgB,SAAAC,EACdC,EACAC,EACAC,GAEA,OAlHF,SACEF,EACAC,EAA6B,CAAA,EAC7BC,EAAwB,CAAE,GAE1B,MAAMC,EAAaC,EAAY,OACxBC,EAAaC,GAAkBC,GAAS,GAEzCC,EAAOC,EACX,IArIK,q5BAqIWP,EAAOQ,cAxIzB,4rGAyIE,CAACR,EAAOQ,eAGJC,EAAOC,EAAY,CAACC,EAAcC,KAAiB,IAAAC,EAErC,OAAlBA,EAAAZ,EAAWa,UAAXD,EAAoBE,YAAYC,KAAKC,UADN,CAAEN,OAAMC,cAEtC,IAEGM,EAAOR,EAAY,KACnBP,IACJM,EAAK,OAAMU,EACNrB,CAAAA,EAAAA,IAELM,GAAe,KACd,CAACD,EAAaL,EAASW,IAEpBW,EAAYV,EACfW,IACC,IAAIC,EAA6B,KACjC,IACEA,EAAON,KAAKO,MAAMF,EAAMG,YAAYF,KACtC,CAAE,MAAOG,GACP,MACF,CACA,GAAKH,EAEL,OAAQA,EAAKX,MACX,IAAK,gBACHZ,MAAAA,EAAO2B,gBAAP3B,EAAO2B,eAAiBJ,EAAKV,SAC7B,MACF,IAAK,UACHb,MAAAA,EAAO4B,WAAP5B,EAAO4B,UAAYL,EAAKV,SACxB,MACF,IAAK,mBACHb,EAAO6B,cAAP7B,EAAO6B,aAAeN,EAAKV,SAC3B,MACF,IAAK,UACHb,MAAAA,EAAOqB,WAAPrB,EAAOqB,UAAYE,EAAKV,SACxB,MACF,IAAK,oBACHb,EAAO8B,eAAP9B,EAAO8B,cAAgBP,EAAKV,SAC5B,MACF,IAAK,iBACmB,MAAtBb,EAAO+B,iBAAP/B,EAAO+B,kBACP,MACF,IAAK,eACH/B,MAAAA,EAAOgC,eAAPhC,EAAOgC,gBACP,MACF,IAAK,wBACHhC,EAAOiC,kBAAPjC,EAAOiC,mBACP,MACF,IAAK,gBACHjC,EAAOkC,WAAPlC,EAAOkC,YACP,MACF,IAAK,eACiB,MAApBlC,EAAOmC,eAAPnC,EAAOm
C,cAAgBZ,EAAKV,SAC5B,MACF,IAAK,cACHb,MAAAA,EAAOoC,oBAAPpC,EAAOoC,mBAAqBb,EAAKV,SACjC,MACF,IAAK,aACHb,MAAAA,EAAOqC,0BAAPrC,EAAOqC,yBAA2Bd,EAAKV,SACvC,MACF,IAAK,uBACHb,EAAOsC,qCAAPtC,EAAOsC,oCAAsCf,EAAKV,SAClD,MACF,IAAK,YACHb,MAAAA,EAAOuC,mBAAPvC,EAAOuC,kBAAoBhB,EAAKV,SAChC,MACF,IAAK,cACHb,EAAOwC,SAAPxC,EAAOwC,QAAUjB,EAAKV,WAM5B,CAACb,IAGH,MAAO,CACLE,aACAuC,aAAc,CACZC,OAAQ,CAAEnC,QACVc,YACAsB,UAAWxB,EACXyB,mBAAmB,EACnBC,iCAAiC,EACjCC,2BAA2B,GAE7B1C,cACA2C,aAAcA,IAAMrC,EAAK,iBACzBsC,WAAYA,IAAMtC,EAAK,eACvBuC,YAAcC,GAAiBxC,EAAK,eAAgB,CAAEwC,SACtDC,iBAAkBA,IAAMzC,EAAK,cAC7B0C,WAAYA,IAAM1C,EAAK,eACvB2C,YAAaA,IAAM3C,EAAK,gBAE5B,CAOS4C,CAAUvD,EAASC,EAAQC,EACpC,CASgB,SAAAsD,EAAqBC,OAACzD,QACpCA,EAAO0D,MACPA,EAAKC,aACLA,EAAYjD,aACZA,GAE2B+C,EAC3B,MAAMG,EAAQ7D,EAAgBC,6IAFrB6D,CAAAJ,EAAA3D,GAEsC,CAAEY,8BAEjD,OACEoD,gBAACC,EAAO1C,EAAA,CACN2C,IAAKJ,EAAMzD,YACPyD,EAAMlB,aACNiB,EACJD,CAAAA,MAAOA,IAGb"}
package/dist/lib.module.js
ADDED
@@ -0,0 +1,2 @@
import n,{useRef as e,useState as a,useMemo as t,useCallback as o}from"react";import{WebView as i}from"react-native-webview";function r(){return r=Object.assign?Object.assign.bind():function(n){for(var e=1;e<arguments.length;e++){var a=arguments[e];for(var t in a)({}).hasOwnProperty.call(a,t)&&(n[t]=a[t])}return n},r.apply(null,arguments)}var s=["options","style","webviewProps","webClientUrl"];function d(n,i,s){return function(n,i,s){void 0===i&&(i={}),void 0===s&&(s={});var d=e(null),l=a(!1),c=l[0],u=l[1],S=t(function(){return'<!doctype html>\n<html>\n <head>\n <meta charset="utf-8" />\n <meta name="viewport" content="width=device-width, initial-scale=1" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id="root"></div>\n <script type="module">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => {\n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === "INIT") {\n try {\n const { Conversation } = await import("'+(s.webClientUrl||"https://unpkg.com/@unith-ai/core-client/dist/lib.module.js")+'");\n\n if (!("VideoDecoder" in window)) {\n send("ERROR", {\n message: "WebCodecs VideoDecoder is not supported on this device.",\n type: "modal",\n endConversation: true,\n });\n return;\n }\n\n const element = document.getElementById("root");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send("STATUS_CHANGE", data),\n onConnect: (data) => send("CONNECT", data),\n onDisconnect: (data) => send("DISCONNECT", data),\n onMessage: (data) => send("MESSAGE", data),\n onSuggestions: (data) => send("SUGGESTIONS", data),\n onSpeakingStart: () => send("SPEAKING_START"),\n onSpeakingEnd: () => send("SPEAKING_END"),\n onTimeoutWarning: () => send("TIMEOUT_WARNING"),\n onTimeout: () => send("TIMEOUT"),\n onKeepSession: (data) => send("KEEP_SESSION", data),\n onMuteStatusChange: (data) => send("MUTE_STATUS", data),\n onError: (data) => send("ERROR", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send("MIC_ERROR", data),\n onMicrophoneStatusChange: (data) => send("MIC_STATUS", data),\n onMicrophoneSpeechRecognitionResult: (data) => send("MIC_TRANSCRIPT", data),\n },\n });\n\n ready = true;\n send("READY");\n\n while (queue.length > 0) {\n const next = queue.shift();\n await applyMessage(next);\n }\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Failed to initialize conversation",\n type: "modal",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n switch (type) {\n case "START_SESSION":\n await conversation.startSession();\n break;\n case "END_SESSION":\n await conversation.endSession();\n break;\n case "SEND_MESSAGE":\n await conversation.sendMessage(payload?.text || "");\n break;\n case "TOGGLE_MIC":\n await conversation.toggleMicrophone();\n break;\n case "TOGGLE_MUTE":\n await conversation.toggleMute();\n break;\n case "KEEP_SESSION":\n await conversation.keepSession();\n break;\n default:\n break;\n }\n };\n\n const handleIncoming = (event) => {\n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== "INIT") {\n queue.push(msg);\n return;\n }\n applyMessage(msg);\n 
} catch (_) {}\n };\n\n window.addEventListener("message", handleIncoming);\n document.addEventListener("message", handleIncoming);\n <\/script>\n </body>\n</html>'},[s.webClientUrl]),p=o(function(n,e){var a;null==(a=d.current)||a.postMessage(JSON.stringify({type:n,payload:e}))},[]),E=o(function(){c||(p("INIT",r({},n)),u(!0))},[c,n,p]),g=o(function(n){var e=null;try{e=JSON.parse(n.nativeEvent.data)}catch(n){return}if(e)switch(e.type){case"STATUS_CHANGE":null==i.onStatusChange||i.onStatusChange(e.payload);break;case"CONNECT":null==i.onConnect||i.onConnect(e.payload);break;case"DISCONNECT":null==i.onDisconnect||i.onDisconnect(e.payload);break;case"MESSAGE":null==i.onMessage||i.onMessage(e.payload);break;case"SUGGESTIONS":null==i.onSuggestions||i.onSuggestions(e.payload);break;case"SPEAKING_START":null==i.onSpeakingStart||i.onSpeakingStart();break;case"SPEAKING_END":null==i.onSpeakingEnd||i.onSpeakingEnd();break;case"TIMEOUT_WARNING":null==i.onTimeoutWarning||i.onTimeoutWarning();break;case"TIMEOUT":null==i.onTimeout||i.onTimeout();break;case"KEEP_SESSION":null==i.onKeepSession||i.onKeepSession(e.payload);break;case"MUTE_STATUS":null==i.onMuteStatusChange||i.onMuteStatusChange(e.payload);break;case"MIC_STATUS":null==i.onMicrophoneStatusChange||i.onMicrophoneStatusChange(e.payload);break;case"MIC_TRANSCRIPT":null==i.onMicrophoneSpeechRecognitionResult||i.onMicrophoneSpeechRecognitionResult(e.payload);break;case"MIC_ERROR":null==i.onMicrophoneError||i.onMicrophoneError(e.payload);break;case"ERROR":null==i.onError||i.onError(e.payload)}},[i]);return{webViewRef:d,webViewProps:{source:{html:S},onMessage:g,onLoadEnd:E,javaScriptEnabled:!0,mediaPlaybackRequiresUserAction:!0,allowsInlineMediaPlayback:!0},initialized:c,startSession:function(){return p("START_SESSION")},endSession:function(){return p("END_SESSION")},sendMessage:function(n){return p("SEND_MESSAGE",{text:n})},toggleMicrophone:function(){return p("TOGGLE_MIC")},toggleMute:function(){return p("TOGGLE_MUTE")},keepSession:function(){return p("KEEP_SESSION")}}}(n,i,s)}function l(e){var a=e.style,t=e.webviewProps,o=e.webClientUrl,l=d(e.options,function(n,e){if(null==n)return{};var a={};for(var t in n)if({}.hasOwnProperty.call(n,t)){if(-1!==e.indexOf(t))continue;a[t]=n[t]}return a}(e,s),{webClientUrl:o});/*#__PURE__*/return n.createElement(i,r({ref:l.webViewRef},l.webViewProps,t,{style:a}))}export{l as UnithConversationView,d as useConversation};
//# sourceMappingURL=lib.module.js.map
package/dist/lib.module.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"lib.module.js","sources":["../src/index.tsx"],"sourcesContent":["import React, { useCallback, useMemo, useRef, useState } from \"react\";\nimport { ViewStyle } from \"react-native\";\nimport { WebView, WebViewMessageEvent } from \"react-native-webview\";\n\nexport type ConversationOptions = {\n orgId: string;\n headId: string;\n apiKey: string;\n environment?: string; \n mode?: string;\n language?: string;\n username?: string;\n allowWakeLock?: boolean;\n fadeTransitionsType?: string;\n microphoneProvider?: \"azure\" | \"eleven_labs\" | \"custom\";\n};\n\nexport type ConversationEvents = {\n onStatusChange?: (prop: { status: string }) => void;\n onConnect?: (prop: { userId: string; headInfo: any; microphoneAccess: boolean }) => void;\n onDisconnect?: (prop: any) => void;\n onMessage?: (prop: any) => void;\n onSuggestions?: (prop: { suggestions: string[] }) => void;\n onTimeoutWarning?: () => void;\n onTimeout?: () => void;\n onMuteStatusChange?: (prop: { isMuted: boolean }) => void;\n onSpeakingStart?: () => void;\n onSpeakingEnd?: () => void;\n onKeepSession?: (prop: { granted: boolean }) => void;\n onError?: (prop: { message: string; endConversation: boolean; type: string }) => void;\n onMicrophoneError?: (prop: { message: string }) => void;\n onMicrophoneStatusChange?: (prop: { status: \"ON\" | \"OFF\" | \"PROCESSING\" }) => void;\n onMicrophoneSpeechRecognitionResult?: (prop: { transcript: string }) => void;\n};\n\nexport type BridgeOptions = {\n webClientUrl?: string;\n};\n\nexport type UseConversationResult = {\n webViewRef: React.RefObject<any>;\n webViewProps: Record<string, any>;\n initialized: boolean;\n startSession: () => void;\n endSession: () => void;\n sendMessage: (text: string) => void;\n toggleMicrophone: () => void;\n toggleMute: () => void;\n keepSession: () => void;\n};\n\ntype BridgeMessage = {\n type: string;\n payload?: any;\n};\n\nconst DEFAULT_WEB_CLIENT_URL =\n \"https://unpkg.com/@unith-ai/core-client/dist/lib.module.js\";\n\nfunction buildHtml(webClientUrl: string) {\n return `<!doctype html>\n<html>\n <head>\n <meta charset=\"utf-8\" />\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id=\"root\"></div>\n <script type=\"module\">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => {\n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === \"INIT\") {\n try {\n const { Conversation } = await import(\"${webClientUrl}\");\n\n if (!(\"VideoDecoder\" in window)) {\n send(\"ERROR\", {\n message: \"WebCodecs VideoDecoder is not supported on this device.\",\n type: \"modal\",\n endConversation: true,\n });\n return;\n }\n\n const element = document.getElementById(\"root\");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send(\"STATUS_CHANGE\", data),\n onConnect: (data) => send(\"CONNECT\", data),\n onDisconnect: (data) => send(\"DISCONNECT\", data),\n onMessage: (data) => send(\"MESSAGE\", data),\n onSuggestions: (data) => send(\"SUGGESTIONS\", data),\n onSpeakingStart: () => send(\"SPEAKING_START\"),\n onSpeakingEnd: () 
=> send(\"SPEAKING_END\"),\n onTimeoutWarning: () => send(\"TIMEOUT_WARNING\"),\n onTimeout: () => send(\"TIMEOUT\"),\n onKeepSession: (data) => send(\"KEEP_SESSION\", data),\n onMuteStatusChange: (data) => send(\"MUTE_STATUS\", data),\n onError: (data) => send(\"ERROR\", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send(\"MIC_ERROR\", data),\n onMicrophoneStatusChange: (data) => send(\"MIC_STATUS\", data),\n onMicrophoneSpeechRecognitionResult: (data) => send(\"MIC_TRANSCRIPT\", data),\n },\n });\n\n ready = true;\n send(\"READY\");\n\n while (queue.length > 0) {\n const next = queue.shift();\n await applyMessage(next);\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to initialize conversation\",\n type: \"modal\",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n switch (type) {\n case \"START_SESSION\":\n await conversation.startSession();\n break;\n case \"END_SESSION\":\n await conversation.endSession();\n break;\n case \"SEND_MESSAGE\":\n await conversation.sendMessage(payload?.text || \"\");\n break;\n case \"TOGGLE_MIC\":\n await conversation.toggleMicrophone();\n break;\n case \"TOGGLE_MUTE\":\n await conversation.toggleMute();\n break;\n case \"KEEP_SESSION\":\n await conversation.keepSession();\n break;\n default:\n break;\n }\n };\n\n const handleIncoming = (event) => {\n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== \"INIT\") {\n queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (_) {}\n };\n\n window.addEventListener(\"message\", handleIncoming);\n document.addEventListener(\"message\", handleIncoming);\n </script>\n </body>\n</html>`;\n}\n\nfunction useBridge(\n options: ConversationOptions,\n events: ConversationEvents = {},\n bridge: BridgeOptions = {}\n): UseConversationResult {\n const webViewRef = useRef<any>(null);\n const [initialized, setInitialized] = useState(false);\n\n const html = useMemo(\n () => buildHtml(bridge.webClientUrl || DEFAULT_WEB_CLIENT_URL),\n [bridge.webClientUrl]\n );\n\n const post = useCallback((type: string, payload?: any) => {\n const message: BridgeMessage = { type, payload };\n webViewRef.current?.postMessage(JSON.stringify(message));\n }, []);\n\n const init = useCallback(() => {\n if (initialized) return;\n post(\"INIT\", {\n ...options,\n });\n setInitialized(true);\n }, [initialized, options, post]);\n\n const onMessage = useCallback(\n (event: WebViewMessageEvent) => {\n let data: BridgeMessage | null = null;\n try {\n data = JSON.parse(event.nativeEvent.data);\n } catch (_) {\n return;\n }\n if (!data) return;\n\n switch (data.type) {\n case \"STATUS_CHANGE\":\n events.onStatusChange?.(data.payload);\n break;\n case \"CONNECT\":\n events.onConnect?.(data.payload);\n break;\n case \"DISCONNECT\":\n events.onDisconnect?.(data.payload);\n break;\n case \"MESSAGE\":\n events.onMessage?.(data.payload);\n break;\n case \"SUGGESTIONS\":\n events.onSuggestions?.(data.payload);\n break;\n case \"SPEAKING_START\":\n events.onSpeakingStart?.();\n break;\n case \"SPEAKING_END\":\n events.onSpeakingEnd?.();\n break;\n case \"TIMEOUT_WARNING\":\n events.onTimeoutWarning?.();\n break;\n case \"TIMEOUT\":\n events.onTimeout?.();\n break;\n case \"KEEP_SESSION\":\n events.onKeepSession?.(data.payload);\n break;\n case \"MUTE_STATUS\":\n events.onMuteStatusChange?.(data.payload);\n break;\n case \"MIC_STATUS\":\n events.onMicrophoneStatusChange?.(data.payload);\n break;\n case \"MIC_TRANSCRIPT\":\n 
events.onMicrophoneSpeechRecognitionResult?.(data.payload);\n break;\n case \"MIC_ERROR\":\n events.onMicrophoneError?.(data.payload);\n break;\n case \"ERROR\":\n events.onError?.(data.payload);\n break;\n default:\n break;\n }\n },\n [events]\n );\n\n return {\n webViewRef,\n webViewProps: {\n source: { html },\n onMessage,\n onLoadEnd: init,\n javaScriptEnabled: true,\n mediaPlaybackRequiresUserAction: true,\n allowsInlineMediaPlayback: true,\n },\n initialized,\n startSession: () => post(\"START_SESSION\"),\n endSession: () => post(\"END_SESSION\"),\n sendMessage: (text: string) => post(\"SEND_MESSAGE\", { text }),\n toggleMicrophone: () => post(\"TOGGLE_MIC\"),\n toggleMute: () => post(\"TOGGLE_MUTE\"),\n keepSession: () => post(\"KEEP_SESSION\"),\n };\n}\n\nexport function useConversation(\n options: ConversationOptions,\n events?: ConversationEvents,\n bridge?: BridgeOptions\n): UseConversationResult {\n return useBridge(options, events, bridge);\n}\n\nexport type UnithConversationViewProps = ConversationEvents & {\n options: ConversationOptions;\n style?: ViewStyle;\n webviewProps?: Record<string, any>;\n webClientUrl?: string;\n};\n\nexport function UnithConversationView({\n options,\n style,\n webviewProps,\n webClientUrl,\n ...events\n}: UnithConversationViewProps) {\n const convo = useConversation(options, events, { webClientUrl });\n\n return (\n <WebView\n ref={convo.webViewRef}\n {...convo.webViewProps}\n {...webviewProps}\n style={style}\n />\n );\n}\n"],"names":["_excluded","useConversation","options","events","bridge","webViewRef","useRef","_useState","useState","initialized","setInitialized","html","useMemo","webClientUrl","post","useCallback","type","payload","_webViewRef$current","current","postMessage","JSON","stringify","init","_extends","onMessage","event","data","parse","nativeEvent","_","onStatusChange","onConnect","onDisconnect","onSuggestions","onSpeakingStart","onSpeakingEnd","onTimeoutWarning","onTimeout","onKeepSession","onMuteStatusChange","onMicrophoneStatusChange","onMicrophoneSpeechRecognitionResult","onMicrophoneError","onError","webViewProps","source","onLoadEnd","javaScriptEnabled","mediaPlaybackRequiresUserAction","allowsInlineMediaPlayback","startSession","endSession","sendMessage","text","toggleMicrophone","toggleMute","keepSession","useBridge","UnithConversationView","_ref","style","webviewProps","convo","_objectWithoutPropertiesLoose","React","WebView","ref"],"mappings":"qVAAA,IAAAA,EAAA,CAAA,UAAA,QAAA,eAAA,yBAqSgBC,EACdC,EACAC,EACAC,GAEA,OAlHF,SACEF,EACAC,EACAC,QADAD,IAAAA,IAAAA,EAA6B,CAAE,YAC/BC,IAAAA,EAAwB,IAExB,IAAMC,EAAaC,EAAY,MAC/BC,EAAsCC,GAAS,GAAxCC,EAAWF,EAAA,GAAEG,EAAcH,KAE5BI,EAAOC,EACX,WAAA,MArIF,s5BAqIkBR,EAAOS,cAxIzB,8DA6HF,+nGAWkE,EAC9D,CAACT,EAAOS,eAGJC,EAAOC,EAAY,SAACC,EAAcC,GAAiB,IAAAC,EAErC,OAAlBA,EAAAb,EAAWc,UAAXD,EAAoBE,YAAYC,KAAKC,UADN,CAAEN,KAAAA,EAAMC,QAAAA,IAEzC,EAAG,IAEGM,EAAOR,EAAY,WACnBN,IACJK,EAAK,OAAMU,EACNtB,GAAAA,IAELQ,GAAe,GACjB,EAAG,CAACD,EAAaP,EAASY,IAEpBW,EAAYV,EAChB,SAACW,GACC,IAAIC,EAA6B,KACjC,IACEA,EAAON,KAAKO,MAAMF,EAAMG,YAAYF,KACtC,CAAE,MAAOG,GACP,MACF,CACA,GAAKH,EAEL,OAAQA,EAAKX,MACX,IAAK,gBACkB,MAArBb,EAAO4B,gBAAP5B,EAAO4B,eAAiBJ,EAAKV,SAC7B,MACF,IAAK,UACa,MAAhBd,EAAO6B,WAAP7B,EAAO6B,UAAYL,EAAKV,SACxB,MACF,IAAK,aACgB,MAAnBd,EAAO8B,cAAP9B,EAAO8B,aAAeN,EAAKV,SAC3B,MACF,IAAK,UACa,MAAhBd,EAAOsB,WAAPtB,EAAOsB,UAAYE,EAAKV,SACxB,MACF,IAAK,cACHd,MAAAA,EAAO+B,eAAP/B,EAAO+B,cAAgBP,EAAKV,SAC5B,MACF,IAAK,iBACHd,MAAAA,EAAOgC,iBAAPhC,EAAOgC,kBACP,MACF,IAAK,eACHhC,MAAAA,EAAOiC,eAAPjC,EAAOiC,gBACP,MACF,IAAK,wBACHjC,EAAOkC
,kBAAPlC,EAAOkC,mBACP,MACF,IAAK,gBACHlC,EAAOmC,WAAPnC,EAAOmC,YACP,MACF,IAAK,qBACHnC,EAAOoC,eAAPpC,EAAOoC,cAAgBZ,EAAKV,SAC5B,MACF,IAAK,cACsB,MAAzBd,EAAOqC,oBAAPrC,EAAOqC,mBAAqBb,EAAKV,SACjC,MACF,IAAK,aAC4B,MAA/Bd,EAAOsC,0BAAPtC,EAAOsC,yBAA2Bd,EAAKV,SACvC,MACF,IAAK,iBACuC,MAA1Cd,EAAOuC,qCAAPvC,EAAOuC,oCAAsCf,EAAKV,SAClD,MACF,IAAK,YACqB,MAAxBd,EAAOwC,mBAAPxC,EAAOwC,kBAAoBhB,EAAKV,SAChC,MACF,IAAK,QACHd,MAAAA,EAAOyC,SAAPzC,EAAOyC,QAAUjB,EAAKV,SAK5B,EACA,CAACd,IAGH,MAAO,CACLE,WAAAA,EACAwC,aAAc,CACZC,OAAQ,CAAEnC,KAAAA,GACVc,UAAAA,EACAsB,UAAWxB,EACXyB,mBAAmB,EACnBC,iCAAiC,EACjCC,2BAA2B,GAE7BzC,YAAAA,EACA0C,aAAc,WAAM,OAAArC,EAAK,gBAAgB,EACzCsC,WAAY,kBAAMtC,EAAK,cAAc,EACrCuC,YAAa,SAACC,GAAY,OAAKxC,EAAK,eAAgB,CAAEwC,KAAAA,GAAO,EAC7DC,iBAAkB,WAAM,OAAAzC,EAAK,aAAa,EAC1C0C,WAAY,kBAAM1C,EAAK,cAAc,EACrC2C,YAAa,WAAM,OAAA3C,EAAK,eAAe,EAE3C,CAOS4C,CAAUxD,EAASC,EAAQC,EACpC,UASgBuD,EAAqBC,OAEnCC,EAAKD,EAALC,MACAC,EAAYF,EAAZE,aACAjD,EAAY+C,EAAZ/C,aAGMkD,EAAQ9D,EANP2D,EAAP1D,mJAIS8D,CAAAJ,EAAA5D,GAEsC,CAAEa,aAAAA,iBAEjD,OACEoD,gBAACC,EAAO1C,EAAA,CACN2C,IAAKJ,EAAM1D,YACP0D,EAAMlB,aACNiB,EAAY,CAChBD,MAAOA,IAGb"}
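The `sourcesContent` embedded in this map carries the full bridge implementation: every frame crossing the WebView boundary is one JSON envelope, and the page buffers commands that arrive before `INIT` has completed, replaying them once the web SDK is up. Summarized as a sketch (the type alias mirrors the internal `BridgeMessage`, and all message names are taken verbatim from the embedded source):

```tsx
// Every frame, in both directions, is one serialized envelope:
type BridgeMessage = { type: string; payload?: unknown };

// Native -> page commands. All but INIT are queued by the page
// until initialization finishes, then replayed in order:
//   INIT, START_SESSION, END_SESSION, SEND_MESSAGE,
//   TOGGLE_MIC, TOGGLE_MUTE, KEEP_SESSION
//
// Page -> native events, relayed to the matching callback (e.g.
// MESSAGE -> onMessage, MIC_TRANSCRIPT -> onMicrophoneSpeechRecognitionResult):
//   READY, STATUS_CHANGE, CONNECT, DISCONNECT, MESSAGE, SUGGESTIONS,
//   SPEAKING_START, SPEAKING_END, TIMEOUT_WARNING, TIMEOUT, KEEP_SESSION,
//   MUTE_STATUS, MIC_STATUS, MIC_TRANSCRIPT, MIC_ERROR, ERROR
```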
package/dist/lib.umd.js
ADDED
@@ -0,0 +1,2 @@
!function(n,e){"object"==typeof exports&&"undefined"!=typeof module?e(exports,require("react"),require("react-native-webview")):"function"==typeof define&&define.amd?define(["exports","react","react-native-webview"],e):e((n||self).reactNative={},n.react,n.reactNativeWebview)}(this,function(n,e,t){function a(n){return n&&"object"==typeof n&&"default"in n?n:{default:n}}var o=/*#__PURE__*/a(e);function i(){return i=Object.assign?Object.assign.bind():function(n){for(var e=1;e<arguments.length;e++){var t=arguments[e];for(var a in t)({}).hasOwnProperty.call(t,a)&&(n[a]=t[a])}return n},i.apply(null,arguments)}var s=["options","style","webviewProps","webClientUrl"];function r(n,t,a){return function(n,t,a){void 0===t&&(t={}),void 0===a&&(a={});var o=e.useRef(null),s=e.useState(!1),r=s[0],l=s[1],d=e.useMemo(function(){return'<!doctype html>\n<html>\n <head>\n <meta charset="utf-8" />\n <meta name="viewport" content="width=device-width, initial-scale=1" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id="root"></div>\n <script type="module">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => {\n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === "INIT") {\n try {\n const { Conversation } = await import("'+(a.webClientUrl||"https://unpkg.com/@unith-ai/core-client/dist/lib.module.js")+'");\n\n if (!("VideoDecoder" in window)) {\n send("ERROR", {\n message: "WebCodecs VideoDecoder is not supported on this device.",\n type: "modal",\n endConversation: true,\n });\n return;\n }\n\n const element = document.getElementById("root");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send("STATUS_CHANGE", data),\n onConnect: (data) => send("CONNECT", data),\n onDisconnect: (data) => send("DISCONNECT", data),\n onMessage: (data) => send("MESSAGE", data),\n onSuggestions: (data) => send("SUGGESTIONS", data),\n onSpeakingStart: () => send("SPEAKING_START"),\n onSpeakingEnd: () => send("SPEAKING_END"),\n onTimeoutWarning: () => send("TIMEOUT_WARNING"),\n onTimeout: () => send("TIMEOUT"),\n onKeepSession: (data) => send("KEEP_SESSION", data),\n onMuteStatusChange: (data) => send("MUTE_STATUS", data),\n onError: (data) => send("ERROR", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send("MIC_ERROR", data),\n onMicrophoneStatusChange: (data) => send("MIC_STATUS", data),\n onMicrophoneSpeechRecognitionResult: (data) => send("MIC_TRANSCRIPT", data),\n },\n });\n\n ready = true;\n send("READY");\n\n while (queue.length > 0) {\n const next = queue.shift();\n await applyMessage(next);\n }\n } catch (error) {\n send("ERROR", {\n message: error?.message || "Failed to initialize conversation",\n type: "modal",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n switch (type) {\n case "START_SESSION":\n await conversation.startSession();\n break;\n case "END_SESSION":\n await conversation.endSession();\n break;\n case "SEND_MESSAGE":\n await conversation.sendMessage(payload?.text || "");\n break;\n case "TOGGLE_MIC":\n await conversation.toggleMicrophone();\n break;\n case "TOGGLE_MUTE":\n await 
conversation.toggleMute();\n break;\n case "KEEP_SESSION":\n await conversation.keepSession();\n break;\n default:\n break;\n }\n };\n\n const handleIncoming = (event) => {\n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== "INIT") {\n queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (_) {}\n };\n\n window.addEventListener("message", handleIncoming);\n document.addEventListener("message", handleIncoming);\n <\/script>\n </body>\n</html>'},[a.webClientUrl]),c=e.useCallback(function(n,e){var t;null==(t=o.current)||t.postMessage(JSON.stringify({type:n,payload:e}))},[]),u=e.useCallback(function(){r||(c("INIT",i({},n)),l(!0))},[r,n,c]),S=e.useCallback(function(n){var e=null;try{e=JSON.parse(n.nativeEvent.data)}catch(n){return}if(e)switch(e.type){case"STATUS_CHANGE":null==t.onStatusChange||t.onStatusChange(e.payload);break;case"CONNECT":null==t.onConnect||t.onConnect(e.payload);break;case"DISCONNECT":null==t.onDisconnect||t.onDisconnect(e.payload);break;case"MESSAGE":null==t.onMessage||t.onMessage(e.payload);break;case"SUGGESTIONS":null==t.onSuggestions||t.onSuggestions(e.payload);break;case"SPEAKING_START":null==t.onSpeakingStart||t.onSpeakingStart();break;case"SPEAKING_END":null==t.onSpeakingEnd||t.onSpeakingEnd();break;case"TIMEOUT_WARNING":null==t.onTimeoutWarning||t.onTimeoutWarning();break;case"TIMEOUT":null==t.onTimeout||t.onTimeout();break;case"KEEP_SESSION":null==t.onKeepSession||t.onKeepSession(e.payload);break;case"MUTE_STATUS":null==t.onMuteStatusChange||t.onMuteStatusChange(e.payload);break;case"MIC_STATUS":null==t.onMicrophoneStatusChange||t.onMicrophoneStatusChange(e.payload);break;case"MIC_TRANSCRIPT":null==t.onMicrophoneSpeechRecognitionResult||t.onMicrophoneSpeechRecognitionResult(e.payload);break;case"MIC_ERROR":null==t.onMicrophoneError||t.onMicrophoneError(e.payload);break;case"ERROR":null==t.onError||t.onError(e.payload)}},[t]);return{webViewRef:o,webViewProps:{source:{html:d},onMessage:S,onLoadEnd:u,javaScriptEnabled:!0,mediaPlaybackRequiresUserAction:!0,allowsInlineMediaPlayback:!0},initialized:r,startSession:function(){return c("START_SESSION")},endSession:function(){return c("END_SESSION")},sendMessage:function(n){return c("SEND_MESSAGE",{text:n})},toggleMicrophone:function(){return c("TOGGLE_MIC")},toggleMute:function(){return c("TOGGLE_MUTE")},keepSession:function(){return c("KEEP_SESSION")}}}(n,t,a)}n.UnithConversationView=function(n){var e=n.style,a=n.webviewProps,l=n.webClientUrl,d=r(n.options,function(n,e){if(null==n)return{};var t={};for(var a in n)if({}.hasOwnProperty.call(n,a)){if(-1!==e.indexOf(a))continue;t[a]=n[a]}return t}(n,s),{webClientUrl:l});/*#__PURE__*/return o.default.createElement(t.WebView,i({ref:d.webViewRef},d.webViewProps,a,{style:e}))},n.useConversation=r});
//# sourceMappingURL=lib.umd.js.map
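The single minified line above is a standard UMD wrapper: it tries CommonJS first, then AMD, and finally attaches a `reactNative` namespace to the global object, treating `react` and `react-native-webview` as externals in every case. De-minified for readability (same behavior as the bundle header; formatting and parameter names are illustrative):

```js
// De-minified shape of the UMD header above.
(function (root, factory) {
  if (typeof exports === "object" && typeof module !== "undefined") {
    // CommonJS: externals resolved via require()
    factory(exports, require("react"), require("react-native-webview"));
  } else if (typeof define === "function" && define.amd) {
    // AMD
    define(["exports", "react", "react-native-webview"], factory);
  } else {
    // Global fallback: exports land on (root || self).reactNative
    factory(((root || self).reactNative = {}), root.react, root.reactNativeWebview);
  }
})(this, function (exports, React, WebViewModule) {
  // ...bundle body: useConversation and UnithConversationView...
});
```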
package/dist/lib.umd.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"lib.umd.js","sources":["../src/index.tsx"],"sourcesContent":["import React, { useCallback, useMemo, useRef, useState } from \"react\";\nimport { ViewStyle } from \"react-native\";\nimport { WebView, WebViewMessageEvent } from \"react-native-webview\";\n\nexport type ConversationOptions = {\n orgId: string;\n headId: string;\n apiKey: string;\n environment?: string; \n mode?: string;\n language?: string;\n username?: string;\n allowWakeLock?: boolean;\n fadeTransitionsType?: string;\n microphoneProvider?: \"azure\" | \"eleven_labs\" | \"custom\";\n};\n\nexport type ConversationEvents = {\n onStatusChange?: (prop: { status: string }) => void;\n onConnect?: (prop: { userId: string; headInfo: any; microphoneAccess: boolean }) => void;\n onDisconnect?: (prop: any) => void;\n onMessage?: (prop: any) => void;\n onSuggestions?: (prop: { suggestions: string[] }) => void;\n onTimeoutWarning?: () => void;\n onTimeout?: () => void;\n onMuteStatusChange?: (prop: { isMuted: boolean }) => void;\n onSpeakingStart?: () => void;\n onSpeakingEnd?: () => void;\n onKeepSession?: (prop: { granted: boolean }) => void;\n onError?: (prop: { message: string; endConversation: boolean; type: string }) => void;\n onMicrophoneError?: (prop: { message: string }) => void;\n onMicrophoneStatusChange?: (prop: { status: \"ON\" | \"OFF\" | \"PROCESSING\" }) => void;\n onMicrophoneSpeechRecognitionResult?: (prop: { transcript: string }) => void;\n};\n\nexport type BridgeOptions = {\n webClientUrl?: string;\n};\n\nexport type UseConversationResult = {\n webViewRef: React.RefObject<any>;\n webViewProps: Record<string, any>;\n initialized: boolean;\n startSession: () => void;\n endSession: () => void;\n sendMessage: (text: string) => void;\n toggleMicrophone: () => void;\n toggleMute: () => void;\n keepSession: () => void;\n};\n\ntype BridgeMessage = {\n type: string;\n payload?: any;\n};\n\nconst DEFAULT_WEB_CLIENT_URL =\n \"https://unpkg.com/@unith-ai/core-client/dist/lib.module.js\";\n\nfunction buildHtml(webClientUrl: string) {\n return `<!doctype html>\n<html>\n <head>\n <meta charset=\"utf-8\" />\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\" />\n <style>\n html, body { margin: 0; padding: 0; width: 100%; height: 100%; background: #0b1020; }\n #root { position: relative; width: 100%; height: 100%; overflow: hidden; }\n </style>\n </head>\n <body>\n <div id=\"root\"></div>\n <script type=\"module\">\n const send = (type, payload) => {\n if (window.ReactNativeWebView) {\n window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));\n }\n };\n\n let conversation = null;\n let queue = [];\n let ready = false;\n\n const applyMessage = async (msg) => {\n const { type, payload } = msg || {};\n if (!type) return;\n\n if (type === \"INIT\") {\n try {\n const { Conversation } = await import(\"${webClientUrl}\");\n\n if (!(\"VideoDecoder\" in window)) {\n send(\"ERROR\", {\n message: \"WebCodecs VideoDecoder is not supported on this device.\",\n type: \"modal\",\n endConversation: true,\n });\n return;\n }\n\n const element = document.getElementById(\"root\");\n conversation = await Conversation.startDigitalHuman({\n ...payload,\n element,\n onStatusChange: (data) => send(\"STATUS_CHANGE\", data),\n onConnect: (data) => send(\"CONNECT\", data),\n onDisconnect: (data) => send(\"DISCONNECT\", data),\n onMessage: (data) => send(\"MESSAGE\", data),\n onSuggestions: (data) => send(\"SUGGESTIONS\", data),\n onSpeakingStart: () => send(\"SPEAKING_START\"),\n onSpeakingEnd: () => 
send(\"SPEAKING_END\"),\n onTimeoutWarning: () => send(\"TIMEOUT_WARNING\"),\n onTimeout: () => send(\"TIMEOUT\"),\n onKeepSession: (data) => send(\"KEEP_SESSION\", data),\n onMuteStatusChange: (data) => send(\"MUTE_STATUS\", data),\n onError: (data) => send(\"ERROR\", data),\n microphoneOptions: {\n onMicrophoneError: (data) => send(\"MIC_ERROR\", data),\n onMicrophoneStatusChange: (data) => send(\"MIC_STATUS\", data),\n onMicrophoneSpeechRecognitionResult: (data) => send(\"MIC_TRANSCRIPT\", data),\n },\n });\n\n ready = true;\n send(\"READY\");\n\n while (queue.length > 0) {\n const next = queue.shift();\n await applyMessage(next);\n }\n } catch (error) {\n send(\"ERROR\", {\n message: error?.message || \"Failed to initialize conversation\",\n type: \"modal\",\n endConversation: true,\n });\n }\n return;\n }\n\n if (!ready || !conversation) return;\n\n switch (type) {\n case \"START_SESSION\":\n await conversation.startSession();\n break;\n case \"END_SESSION\":\n await conversation.endSession();\n break;\n case \"SEND_MESSAGE\":\n await conversation.sendMessage(payload?.text || \"\");\n break;\n case \"TOGGLE_MIC\":\n await conversation.toggleMicrophone();\n break;\n case \"TOGGLE_MUTE\":\n await conversation.toggleMute();\n break;\n case \"KEEP_SESSION\":\n await conversation.keepSession();\n break;\n default:\n break;\n }\n };\n\n const handleIncoming = (event) => {\n try {\n const msg = JSON.parse(event.data);\n if (!ready && msg.type !== \"INIT\") {\n queue.push(msg);\n return;\n }\n applyMessage(msg);\n } catch (_) {}\n };\n\n window.addEventListener(\"message\", handleIncoming);\n document.addEventListener(\"message\", handleIncoming);\n </script>\n </body>\n</html>`;\n}\n\nfunction useBridge(\n options: ConversationOptions,\n events: ConversationEvents = {},\n bridge: BridgeOptions = {}\n): UseConversationResult {\n const webViewRef = useRef<any>(null);\n const [initialized, setInitialized] = useState(false);\n\n const html = useMemo(\n () => buildHtml(bridge.webClientUrl || DEFAULT_WEB_CLIENT_URL),\n [bridge.webClientUrl]\n );\n\n const post = useCallback((type: string, payload?: any) => {\n const message: BridgeMessage = { type, payload };\n webViewRef.current?.postMessage(JSON.stringify(message));\n }, []);\n\n const init = useCallback(() => {\n if (initialized) return;\n post(\"INIT\", {\n ...options,\n });\n setInitialized(true);\n }, [initialized, options, post]);\n\n const onMessage = useCallback(\n (event: WebViewMessageEvent) => {\n let data: BridgeMessage | null = null;\n try {\n data = JSON.parse(event.nativeEvent.data);\n } catch (_) {\n return;\n }\n if (!data) return;\n\n switch (data.type) {\n case \"STATUS_CHANGE\":\n events.onStatusChange?.(data.payload);\n break;\n case \"CONNECT\":\n events.onConnect?.(data.payload);\n break;\n case \"DISCONNECT\":\n events.onDisconnect?.(data.payload);\n break;\n case \"MESSAGE\":\n events.onMessage?.(data.payload);\n break;\n case \"SUGGESTIONS\":\n events.onSuggestions?.(data.payload);\n break;\n case \"SPEAKING_START\":\n events.onSpeakingStart?.();\n break;\n case \"SPEAKING_END\":\n events.onSpeakingEnd?.();\n break;\n case \"TIMEOUT_WARNING\":\n events.onTimeoutWarning?.();\n break;\n case \"TIMEOUT\":\n events.onTimeout?.();\n break;\n case \"KEEP_SESSION\":\n events.onKeepSession?.(data.payload);\n break;\n case \"MUTE_STATUS\":\n events.onMuteStatusChange?.(data.payload);\n break;\n case \"MIC_STATUS\":\n events.onMicrophoneStatusChange?.(data.payload);\n break;\n case \"MIC_TRANSCRIPT\":\n 
events.onMicrophoneSpeechRecognitionResult?.(data.payload);\n break;\n case \"MIC_ERROR\":\n events.onMicrophoneError?.(data.payload);\n break;\n case \"ERROR\":\n events.onError?.(data.payload);\n break;\n default:\n break;\n }\n },\n [events]\n );\n\n return {\n webViewRef,\n webViewProps: {\n source: { html },\n onMessage,\n onLoadEnd: init,\n javaScriptEnabled: true,\n mediaPlaybackRequiresUserAction: true,\n allowsInlineMediaPlayback: true,\n },\n initialized,\n startSession: () => post(\"START_SESSION\"),\n endSession: () => post(\"END_SESSION\"),\n sendMessage: (text: string) => post(\"SEND_MESSAGE\", { text }),\n toggleMicrophone: () => post(\"TOGGLE_MIC\"),\n toggleMute: () => post(\"TOGGLE_MUTE\"),\n keepSession: () => post(\"KEEP_SESSION\"),\n };\n}\n\nexport function useConversation(\n options: ConversationOptions,\n events?: ConversationEvents,\n bridge?: BridgeOptions\n): UseConversationResult {\n return useBridge(options, events, bridge);\n}\n\nexport type UnithConversationViewProps = ConversationEvents & {\n options: ConversationOptions;\n style?: ViewStyle;\n webviewProps?: Record<string, any>;\n webClientUrl?: string;\n};\n\nexport function UnithConversationView({\n options,\n style,\n webviewProps,\n webClientUrl,\n ...events\n}: UnithConversationViewProps) {\n const convo = useConversation(options, events, { webClientUrl });\n\n return (\n <WebView\n ref={convo.webViewRef}\n {...convo.webViewProps}\n {...webviewProps}\n style={style}\n />\n );\n}\n"],"names":["_excluded","useConversation","options","events","bridge","webViewRef","useRef","_useState","useState","initialized","setInitialized","html","useMemo","webClientUrl","post","useCallback","type","payload","_webViewRef$current","current","postMessage","JSON","stringify","init","_extends","onMessage","event","data","parse","nativeEvent","_","onStatusChange","onConnect","onDisconnect","onSuggestions","onSpeakingStart","onSpeakingEnd","onTimeoutWarning","onTimeout","onKeepSession","onMuteStatusChange","onMicrophoneStatusChange","onMicrophoneSpeechRecognitionResult","onMicrophoneError","onError","webViewProps","source","onLoadEnd","javaScriptEnabled","mediaPlaybackRequiresUserAction","allowsInlineMediaPlayback","startSession","endSession","sendMessage","text","toggleMicrophone","toggleMute","keepSession","useBridge","_ref","style","webviewProps","convo","_objectWithoutPropertiesLoose","React","WebView","ref"],"mappings":"8oBAAA,IAAAA,EAAA,CAAA,UAAA,QAAA,eAAA,yBAqSgBC,EACdC,EACAC,EACAC,GAEA,OAlHF,SACEF,EACAC,EACAC,QADAD,IAAAA,IAAAA,EAA6B,CAAE,YAC/BC,IAAAA,EAAwB,IAExB,IAAMC,EAAaC,EAAAA,OAAY,MAC/BC,EAAsCC,EAAQA,UAAC,GAAxCC,EAAWF,EAAA,GAAEG,EAAcH,KAE5BI,EAAOC,EAAAA,QACX,WAAA,MArIF,s5BAqIkBR,EAAOS,cAxIzB,8DA6HF,+nGAWkE,EAC9D,CAACT,EAAOS,eAGJC,EAAOC,cAAY,SAACC,EAAcC,GAAiB,IAAAC,EAErC,OAAlBA,EAAAb,EAAWc,UAAXD,EAAoBE,YAAYC,KAAKC,UADN,CAAEN,KAAAA,EAAMC,QAAAA,IAEzC,EAAG,IAEGM,EAAOR,EAAWA,YAAC,WACnBN,IACJK,EAAK,OAAMU,EACNtB,GAAAA,IAELQ,GAAe,GACjB,EAAG,CAACD,EAAaP,EAASY,IAEpBW,EAAYV,cAChB,SAACW,GACC,IAAIC,EAA6B,KACjC,IACEA,EAAON,KAAKO,MAAMF,EAAMG,YAAYF,KACtC,CAAE,MAAOG,GACP,MACF,CACA,GAAKH,EAEL,OAAQA,EAAKX,MACX,IAAK,gBACkB,MAArBb,EAAO4B,gBAAP5B,EAAO4B,eAAiBJ,EAAKV,SAC7B,MACF,IAAK,UACa,MAAhBd,EAAO6B,WAAP7B,EAAO6B,UAAYL,EAAKV,SACxB,MACF,IAAK,aACgB,MAAnBd,EAAO8B,cAAP9B,EAAO8B,aAAeN,EAAKV,SAC3B,MACF,IAAK,UACa,MAAhBd,EAAOsB,WAAPtB,EAAOsB,UAAYE,EAAKV,SACxB,MACF,IAAK,cACHd,MAAAA,EAAO+B,eAAP/B,EAAO+B,cAAgBP,EAAKV,SAC5B,MACF,IAAK,iBACHd,MAAAA,EAAOgC,iBAAPhC,EAAOgC,kBACP,MACF,IAAK,eACHhC,MAAAA,EAAOiC,eAAPjC,EAAOiC,gBACP,MACF,IAAK,wBACHjC,EAAOk
C,kBAAPlC,EAAOkC,mBACP,MACF,IAAK,gBACHlC,EAAOmC,WAAPnC,EAAOmC,YACP,MACF,IAAK,qBACHnC,EAAOoC,eAAPpC,EAAOoC,cAAgBZ,EAAKV,SAC5B,MACF,IAAK,cACsB,MAAzBd,EAAOqC,oBAAPrC,EAAOqC,mBAAqBb,EAAKV,SACjC,MACF,IAAK,aAC4B,MAA/Bd,EAAOsC,0BAAPtC,EAAOsC,yBAA2Bd,EAAKV,SACvC,MACF,IAAK,iBACuC,MAA1Cd,EAAOuC,qCAAPvC,EAAOuC,oCAAsCf,EAAKV,SAClD,MACF,IAAK,YACqB,MAAxBd,EAAOwC,mBAAPxC,EAAOwC,kBAAoBhB,EAAKV,SAChC,MACF,IAAK,QACHd,MAAAA,EAAOyC,SAAPzC,EAAOyC,QAAUjB,EAAKV,SAK5B,EACA,CAACd,IAGH,MAAO,CACLE,WAAAA,EACAwC,aAAc,CACZC,OAAQ,CAAEnC,KAAAA,GACVc,UAAAA,EACAsB,UAAWxB,EACXyB,mBAAmB,EACnBC,iCAAiC,EACjCC,2BAA2B,GAE7BzC,YAAAA,EACA0C,aAAc,WAAM,OAAArC,EAAK,gBAAgB,EACzCsC,WAAY,kBAAMtC,EAAK,cAAc,EACrCuC,YAAa,SAACC,GAAY,OAAKxC,EAAK,eAAgB,CAAEwC,KAAAA,GAAO,EAC7DC,iBAAkB,WAAM,OAAAzC,EAAK,aAAa,EAC1C0C,WAAY,kBAAM1C,EAAK,cAAc,EACrC2C,YAAa,WAAM,OAAA3C,EAAK,eAAe,EAE3C,CAOS4C,CAAUxD,EAASC,EAAQC,EACpC,kCASqCuD,OAEnCC,EAAKD,EAALC,MACAC,EAAYF,EAAZE,aACAhD,EAAY8C,EAAZ9C,aAGMiD,EAAQ7D,EANP0D,EAAPzD,mJAIS6D,CAAAJ,EAAA3D,GAEsC,CAAEa,aAAAA,iBAEjD,OACEmD,wBAACC,EAAOA,QAAAzC,EAAA,CACN0C,IAAKJ,EAAMzD,YACPyD,EAAMjB,aACNgB,EAAY,CAChBD,MAAOA,IAGb"}
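Like the module build, this map embeds the WebCodecs guard that runs inside the WebView before the web SDK is imported: if `VideoDecoder` is missing, the page reports a terminal error instead of booting (this is the runtime check behind the README's WebCodecs note). Extracted for readability, with the helper and the check copied from the embedded source:

```js
// Posts one envelope from the page back to the native side.
const send = (type, payload) => {
  if (window.ReactNativeWebView) {
    window.ReactNativeWebView.postMessage(JSON.stringify({ type, payload }));
  }
};

// Runs before Conversation.startDigitalHuman; a missing decoder
// ends the conversation with a modal-style error.
if (!("VideoDecoder" in window)) {
  send("ERROR", {
    message: "WebCodecs VideoDecoder is not supported on this device.",
    type: "modal",
    endConversation: true,
  });
}
```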
package/package.json
ADDED
@@ -0,0 +1,46 @@
{
  "name": "@unith-ai/react-native",
  "version": "0.0.1",
  "description": "React Native WebView wrapper for Unith AI digital humans",
  "main": "./dist/lib.js",
  "module": "./dist/lib.module.js",
  "source": "src/index.tsx",
  "types": "./dist/index.d.ts",
  "files": [
    "dist",
    "README.md"
  ],
  "keywords": [
    "react-native",
    "webview",
    "unith",
    "digital-human",
    "ai"
  ],
  "author": "UNITH AI",
  "license": "MIT",
  "peerDependencies": {
    "react": ">=16.8.0",
    "react-native": ">=0.70.0",
    "react-native-webview": ">=11.0.0"
  },
  "devDependencies": {
    "@types/react": "^18.3.3",
    "eslint": "^9.8.0",
    "microbundle": "^0.15.1",
    "typescript": "^5.5.4"
  },
  "repository": {
    "type": "git",
    "url": "git+https://github.com/unith-ai/libraries",
    "directory": "packages/react-native"
  },
  "scripts": {
    "build": "npm run clean && microbundle --jsx React.createElement --jsxFragment React.Fragment --jsxImportSource react src/index.tsx",
    "clean": "rm -rf ./dist",
    "dev": "npm run clean && microbundle --jsx React.createElement --jsxFragment React.Fragment --jsxImportSource react src/index.tsx -w",
    "lint:ts": "tsc --noEmit --skipLibCheck",
    "lint:es": "npx eslint .",
    "lint:prettier": "prettier 'src/**/*.{ts,tsx}'"
  }
}
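`react-native-webview` is declared as a peer dependency because the package renders the host app's own WebView instance, and the `build`/`dev` scripts run microbundle over `src/index.tsx` to produce the `dist/` formats referenced by `main`, `module`, and the UMD file above. For completeness, a sketch of driving the hook's imperative surface, built only from the API shown in the embedded source (screen layout and button wiring are illustrative):

```tsx
import React from "react";
import { Button, View } from "react-native";
import { WebView } from "react-native-webview";
import { useConversation } from "@unith-ai/react-native";

export default function ControlledScreen() {
  const convo = useConversation(
    { orgId: "YOUR_ORG_ID", headId: "YOUR_HEAD_ID", apiKey: "YOUR_API_KEY" },
    {
      // Second argument: event callbacks relayed out of the WebView.
      onMessage: (msg) => console.log("head message:", msg),
      onError: (err) => console.warn(err.message),
    }
  );

  return (
    <View style={{ flex: 1 }}>
      <WebView ref={convo.webViewRef} {...convo.webViewProps} />
      {/* startSession() needs a user gesture; commands posted before
          INIT completes are queued by the page and replayed. */}
      <Button title="Start" onPress={convo.startSession} />
      <Button title="Say hello" onPress={() => convo.sendMessage("Hello!")} />
      <Button title="Mic" onPress={convo.toggleMicrophone} />
    </View>
  );
}
```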