@revrag-ai/embed-react-native 1.0.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +20 -0
- package/Onwid.podspec +20 -0
- package/README.md +402 -0
- package/android/build.gradle +83 -0
- package/android/gradle.properties +5 -0
- package/ios/Onwid.h +5 -0
- package/ios/Onwid.mm +18 -0
- package/lib/index.d.ts +77 -0
- package/lib/module/Event/onwid.js +74 -0
- package/lib/module/NativeOnwid.js +4 -0
- package/lib/module/component/OnwidButton.js +366 -0
- package/lib/module/component/audiowave.js +137 -0
- package/lib/module/component/voice.js +103 -0
- package/lib/module/hooks/initialize.js +92 -0
- package/lib/module/hooks/initialize.types.js +2 -0
- package/lib/module/hooks/initializelivekit.js +14 -0
- package/lib/module/hooks/voiceAgent.js +334 -0
- package/lib/module/hooks/voiceAgent.types.js +2 -0
- package/lib/module/index.js +61 -0
- package/lib/module/onwidApi/api.js +184 -0
- package/lib/module/onwidApi/api.types.js +2 -0
- package/lib/module/store.key.js +47 -0
- package/lib/module/style/onwidButton.style.js +230 -0
- package/lib/module/utils/reanimatedHelpers.js +87 -0
- package/lib/module/utils/utils.js +1 -0
- package/lib/typescript/Event/onwid.d.ts +13 -0
- package/lib/typescript/NativeOnwid.d.ts +6 -0
- package/lib/typescript/component/OnwidButton.d.ts +28 -0
- package/lib/typescript/component/audiowave.d.ts +6 -0
- package/lib/typescript/component/voice.d.ts +15 -0
- package/lib/typescript/hooks/initialize.d.ts +2 -0
- package/lib/typescript/hooks/initialize.types.d.ts +5 -0
- package/lib/typescript/hooks/initializelivekit.d.ts +3 -0
- package/lib/typescript/hooks/voiceAgent.d.ts +2 -0
- package/lib/typescript/hooks/voiceAgent.types.d.ts +16 -0
- package/lib/typescript/index.d.ts +27 -0
- package/lib/typescript/onwidApi/api.d.ts +53 -0
- package/lib/typescript/onwidApi/api.types.d.ts +21 -0
- package/lib/typescript/store.key.d.ts +3 -0
- package/lib/typescript/style/onwidButton.style.d.ts +98 -0
- package/lib/typescript/utils/reanimatedHelpers.d.ts +29 -0
- package/lib/typescript/utils/utils.d.ts +0 -0
- package/package.json +208 -0
- package/react-native.config.js +19 -0
- package/scripts/verify-setup.js +90 -0
|
@@ -0,0 +1,366 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.OnwidButton = OnwidButton;
|
|
7
|
+
const jsx_runtime_1 = require("react/jsx-runtime");
|
|
8
|
+
const lottie_react_native_1 = __importDefault(require("lottie-react-native"));
|
|
9
|
+
const react_1 = require("react");
|
|
10
|
+
const react_native_1 = require("react-native");
|
|
11
|
+
const react_native_gesture_handler_1 = require("react-native-gesture-handler");
|
|
12
|
+
const react_native_linear_gradient_1 = __importDefault(require("react-native-linear-gradient"));
|
|
13
|
+
const voice_1 = __importDefault(require("../component/voice"));
|
|
14
|
+
const voiceAgent_1 = require("../hooks/voiceAgent");
|
|
15
|
+
const store_key_1 = require("../store.key");
|
|
16
|
+
const onwidButton_style_1 = require("../style/onwidButton.style");
|
|
17
|
+
const reanimatedHelpers_1 = require("../utils/reanimatedHelpers");
|
|
18
|
+
const audiowave_1 = require("./audiowave");
|
|
19
|
+
// Get reanimated API with fallbacks
|
|
20
|
+
const { useSharedValue, useAnimatedStyle, withTiming, withSpring, withRepeat, withSequence, runOnJS, Easing, Animated, isAvailable: isReanimatedAvailable, } = (0, reanimatedHelpers_1.getReanimatedAPI)();
|
|
21
|
+
// Show warning if reanimated is not available
|
|
22
|
+
if (!isReanimatedAvailable) {
|
|
23
|
+
(0, reanimatedHelpers_1.showReanimatedSetupError)();
|
|
24
|
+
}
|
|
25
|
+
const { width: SCREEN_WIDTH, height: SCREEN_HEIGHT } = react_native_1.Dimensions.get('window');
|
|
26
|
+
const BUTTON_WIDTH = 60;
|
|
27
|
+
const EXPANDED_WIDTH = SCREEN_WIDTH * 0.9;
|
|
28
|
+
const BUTTON_HEIGHT = 60;
|
|
29
|
+
const GRADIENT_COLORS = ['#1E0844', '#B391F3'];
|
|
30
|
+
// Define mic icons as base64 images for portability
|
|
31
|
+
const MIC_ON_ICON = 'https://revrag-dev.s3.ap-south-1.amazonaws.com/Avatars/Mute+button.png';
|
|
32
|
+
const MIC_OFF_ICON = 'https://revrag-dev.s3.ap-south-1.amazonaws.com/Avatars/unmute.png';
|
|
33
|
+
// Add end call icon
|
|
34
|
+
const END_CALL_ICON = 'https://revrag-dev.s3.ap-south-1.amazonaws.com/Avatars/end+button.png';
|
|
35
|
+
const AMPLIFY_ANIMATION = 'https://revrag-dev.s3.ap-south-1.amazonaws.com/Avatars/amplify.json';
|
|
36
|
+
/**
|
|
37
|
+
* Default styles configuration for the button
|
|
38
|
+
*/
|
|
39
|
+
const defaultStyles = {
|
|
40
|
+
buttonWidth: 60,
|
|
41
|
+
buttonHeight: 60,
|
|
42
|
+
borderRadius: 100,
|
|
43
|
+
marginBottom: 20,
|
|
44
|
+
spacing: {
|
|
45
|
+
SMALL: 10,
|
|
46
|
+
MEDIUM: 15,
|
|
47
|
+
LARGE: 25,
|
|
48
|
+
},
|
|
49
|
+
};
|
|
50
|
+
/**
 * OnwidButton Component
 *
 * A floating voice-agent button that can be dragged around the screen and
 * expanded into a pill-shaped in-call UI. Features include:
 * - Draggable positioning (pan gesture, clamped to stay on screen)
 * - Expand/collapse animation between a 60px circle and ~90% screen width
 * - A "breathing" scale animation plus tooltip after 15s collapsed
 * - Gradient background with remote-configurable colors and text
 * - Start/end call, mic mute/unmute, and call-duration display via useVoiceAgent
 *
 * Takes no props; all configuration is read from the persisted agent config
 * ('@config_data' -> ui_config) and the useVoiceAgent hook. Renders null
 * until the config has loaded.
 *
 * @component
 * @example
 * ```tsx
 * <OnwidButton />
 * ```
 */
function OnwidButton() {
    var _a;
    const { initializeVoiceAgent, tokenDetails, endCall, isLoading, isMicMuted, muteMic, unmuteMic, connectionState, roomRef, } = (0, voiceAgent_1.useVoiceAgent)();
    // State management
    const [configData, setConfigData] = (0, react_1.useState)(null); // ui_config from storage; null until loaded
    const [isOpen, setIsOpen] = (0, react_1.useState)(false); // expanded pill vs collapsed circle
    const [callDuration, setCallDuration] = (0, react_1.useState)(0); // seconds since connect
    const timerRef = (0, react_1.useRef)(null); // interval id for the duration counter
    const lottieRef = (0, react_1.useRef)(null);
    // NOTE(review): debug logging left in the published build; runs every render.
    console.log('roomRef', (_a = roomRef.current) === null || _a === void 0 ? void 0 : _a.localParticipant);
    // Animation values (reanimated shared values, or JS fallbacks)
    const isPressed = useSharedValue(false);
    const offset = useSharedValue({ x: 0, y: 0 }); // current drag offset (non-positive: dragged up/left)
    const start = useSharedValue({ x: 0, y: 0 }); // offset at the start of the current pan
    const menuAnimation = useSharedValue(0);
    const buttonWidth = useSharedValue(BUTTON_WIDTH);
    const buttonScale = useSharedValue(1); // breathing-animation scale
    // Styles
    const styles = (0, onwidButton_style_1.createOnwidButtonStyles)(defaultStyles);
    const [isAutoOpen, setIsAutoOpen] = (0, react_1.useState)(false); // drives tooltip + breathing pulse
    /**
     * After 15s of staying collapsed, set isAutoOpen so the tooltip and
     * breathing animation start. The timer restarts whenever isOpen changes.
     */
    (0, react_1.useEffect)(() => {
        const autoOpenTimer = setTimeout(() => {
            if (!isOpen) {
                console.log('autoOpenTimer', isOpen);
                setIsAutoOpen(true);
            }
        }, 15000); // 15 seconds
        return () => {
            clearTimeout(autoOpenTimer);
        };
    }, [isOpen]);
    /**
     * Fetch agent configuration data (ui_config) persisted under '@config_data'.
     */
    (0, react_1.useEffect)(() => {
        const fetchAgentData = async () => {
            try {
                const data = await (0, store_key_1.getAgentData)('@config_data');
                setConfigData(data === null || data === void 0 ? void 0 : data.ui_config);
            }
            catch (error) {
                console.error('Error retrieving agent data:', error);
            }
        };
        fetchAgentData();
    }, []);
    /**
     * Set up a 1s interval to track call duration while connected, and tear it
     * down when the connection drops or on unmount.
     */
    (0, react_1.useEffect)(() => {
        if (connectionState === 'connected' && !timerRef.current) {
            timerRef.current = setInterval(() => {
                setCallDuration((prev) => prev + 1);
            }, 1000);
        }
        else if (connectionState !== 'connected' && timerRef.current) {
            clearInterval(timerRef.current);
            timerRef.current = null;
            // Previously connected and now disconnected — logged only, no UI error shown.
            if (callDuration > 0) {
                console.log('Call unexpectedly disconnected after', callDuration, 'seconds');
            }
        }
        return () => {
            if (timerRef.current) {
                clearInterval(timerRef.current);
                timerRef.current = null;
            }
        };
    }, [connectionState, callDuration]);
    /**
     * Handle menu animation and button width transitions when toggling open/closed.
     */
    (0, react_1.useEffect)(() => {
        menuAnimation.value = withTiming(isOpen ? 0.8 : 0, {
            duration: 300,
        });
        buttonWidth.value = withTiming(isOpen ? EXPANDED_WIDTH : BUTTON_WIDTH);
    }, [isOpen, menuAnimation, buttonWidth]);
    // Breathing (pulse) animation while the button is closed and isAutoOpen is set.
    (0, react_1.useEffect)(() => {
        if (!isOpen && isAutoOpen) {
            // Infinite 1.0 -> 1.1 -> 1.0 scale loop, 1.5s per half-cycle.
            buttonScale.value = withRepeat(withSequence(withTiming(1.1, {
                duration: 1500,
                easing: Easing.inOut(Easing.ease),
            }), withTiming(1, {
                duration: 1500,
                easing: Easing.inOut(Easing.ease),
            })), -1, // Infinite repeat
            false // Don't reverse
            );
        }
        else {
            // Reset animation
            buttonScale.value = withTiming(1, { duration: 300 });
        }
    }, [isOpen, isAutoOpen]);
    /**
     * Animated styles for the button: animated width, drag offset (x clamped
     * to keep the button on screen), and press/breathing scale.
     */
    const animatedStyles = useAnimatedStyle(() => {
        const maxX = SCREEN_WIDTH - (isOpen ? EXPANDED_WIDTH : BUTTON_WIDTH) - 35;
        const clampedX = Math.min(Math.max(offset.value.x, -maxX), 0);
        return {
            width: buttonWidth.value,
            height: BUTTON_HEIGHT,
            transform: [
                { translateX: clampedX },
                { translateY: offset.value.y },
                { scale: withSpring(isPressed.value ? 0.95 : buttonScale.value) },
            ],
            justifyContent: isOpen ? 'space-between' : 'flex-start',
            overflow: 'hidden',
        };
    });
    /**
     * Animated styles for the tooltip text so it follows the button's drag offset.
     */
    const animatedTextStyles = useAnimatedStyle(() => {
        const maxX = SCREEN_WIDTH;
        const clampedX = Math.min(Math.max(offset.value.x, -maxX), 0);
        return {
            transform: [
                { translateX: clampedX },
                { translateY: offset.value.y },
                { scale: withSpring(isPressed.value ? 1 : 1) }, // NOTE(review): both branches are 1 — scale is effectively constant
            ],
        };
    });
    /**
     * Pan gesture handler for drag functionality. Offsets are kept non-positive
     * (the button is anchored bottom-right and dragged up/left), and a drag
     * cancels any pending auto-open state.
     */
    const gesture = react_native_gesture_handler_1.Gesture.Pan()
        .onBegin(() => {
        isPressed.value = true;
        if (isAutoOpen) {
            runOnJS(setIsAutoOpen)(false);
        }
    })
        .onUpdate((e) => {
        const maxX = SCREEN_WIDTH - (isOpen ? EXPANDED_WIDTH : BUTTON_WIDTH) - 0;
        const newX = Math.min(Math.max(e.translationX + start.value.x, -maxX), 0);
        const maxY = SCREEN_HEIGHT - 150;
        const newY = Math.min(Math.max(e.translationY + start.value.y, -maxY), 0);
        offset.value = {
            x: newX,
            y: newY,
        };
    })
        .onEnd(() => {
        // Persist the final position as the starting point of the next pan.
        start.value = {
            x: offset.value.x,
            y: offset.value.y,
        };
    })
        .onFinalize(() => {
        isPressed.value = false;
    });
    /**
     * Toggle the expanded state and cancel any pending auto-open animation.
     */
    const handlePress = () => {
        console.log('handlePress', isOpen);
        console.log('isAutoOpen', isAutoOpen);
        setIsOpen(!isOpen);
        setIsAutoOpen(false);
    };
    // Reset the duration counter and connect the voice agent.
    const handleStartCall = async () => {
        setCallDuration(0);
        await initializeVoiceAgent();
    };
    /**
     * Lottie source for the collapsed-button icon; falls back to the remote
     * AMPLIFY_ANIMATION URL when the config provides no icon_animation.
     */
    const remoteSource = (0, react_1.useMemo)(() => ({
        uri: (configData === null || configData === void 0 ? void 0 : configData.icon_animation) || AMPLIFY_ANIMATION,
    }), [configData === null || configData === void 0 ? void 0 : configData.icon_animation]);
    /**
     * Render the button icon (a looping, always-auto-playing Lottie animation).
     */
    const renderIcon = () => {
        return ((0, jsx_runtime_1.jsx)(react_native_1.View, { children: (0, jsx_runtime_1.jsx)(lottie_react_native_1.default, { ref: lottieRef, source: remoteSource, autoPlay: true, loop: true, style: styles.iconImage, enableMergePathsAndroidForKitKatAndAbove: true, enableSafeModeAndroid: true }) }));
    };
    const handleConnected = () => {
        console.log('Call connected');
    };
    // Collapse the UI, stop the duration timer, and disconnect the call.
    const handleEndCall = async () => {
        setIsOpen(false);
        if (timerRef.current) {
            clearInterval(timerRef.current);
            timerRef.current = null;
        }
        setCallDuration(0);
        await endCall();
    };
    const handleMicToggle = () => {
        if (isMicMuted) {
            unmuteMic();
        }
        else {
            muteMic();
        }
    };
    // Format duration to MM:SS
    const formatDuration = (seconds) => {
        const minutes = Math.floor(seconds / 60);
        const remainingSeconds = seconds % 60;
        return `${minutes.toString().padStart(2, '0')}:${remainingSeconds
            .toString()
            .padStart(2, '0')}`;
    };
    // Get the status text based on current state:
    // connecting -> duration (if in a call) -> configured agent type.
    const getStatusText = () => {
        if (isLoading) {
            return 'Connecting...';
        }
        else if (tokenDetails === null || tokenDetails === void 0 ? void 0 : tokenDetails.token) {
            return `Call Duration: ${formatDuration(callDuration)}`;
        }
        else {
            return (configData === null || configData === void 0 ? void 0 : configData.agent_type) || 'Onboarding Agent';
        }
    };
    // Render nothing until the persisted UI config has loaded.
    if (!configData)
        return null;
    return ((0, jsx_runtime_1.jsxs)(react_native_1.View, { style: styles.container, children: [isAutoOpen && !isOpen && ((0, jsx_runtime_1.jsx)(Animated.View, { style: [
                    animatedTextStyles,
                    {
                        position: 'absolute',
                        borderRadius: 5,
                        paddingVertical: 2,
                        paddingHorizontal: 10,
                        backgroundColor: 'rgba(0, 0, 0, 0.5)',
                        bottom: BUTTON_HEIGHT + 40,
                        // right: Math.abs(offset.value.x) + BUTTON_WIDTH + 0,
                    },
                ], children: (0, jsx_runtime_1.jsx)(react_native_1.Text, { style: { color: 'white', fontSize: 10, fontWeight: '500' }, children: (configData === null || configData === void 0 ? void 0 : configData.popup_description) || 'Revrag' }) })), (0, jsx_runtime_1.jsx)(react_native_gesture_handler_1.GestureDetector, { gesture: gesture, children: (0, jsx_runtime_1.jsx)(Animated.View, { style: [
                    styles.button,
                    animatedStyles,
                    styles.buttonContent,
                    {
                        pointerEvents: 'auto',
                    },
                ], children: (0, jsx_runtime_1.jsxs)(react_native_linear_gradient_1.default, { colors: (configData === null || configData === void 0 ? void 0 : configData.gradient) || GRADIENT_COLORS, start: { x: 0, y: 0 }, end: { x: 1, y: 0 }, style: [
                        styles.linearGradient,
                        {
                            width: '100%',
                            flexDirection: 'row',
                            alignItems: 'center',
                            paddingHorizontal: 0,
                            paddingLeft: 0,
                            paddingRight: isOpen ? 5 : 0,
                        },
                    ], angle: 0, angleCenter: { x: 0.5, y: 0.5 }, children: [(tokenDetails === null || tokenDetails === void 0 ? void 0 : tokenDetails.token) && ((0, jsx_runtime_1.jsx)(voice_1.default, { url: tokenDetails === null || tokenDetails === void 0 ? void 0 : tokenDetails.server_url, token: tokenDetails === null || tokenDetails === void 0 ? void 0 : tokenDetails.token, onDisconnected: handleEndCall, roomRef: roomRef, onConnected: handleConnected })), (0, jsx_runtime_1.jsx)(react_native_1.View, { style: [
                                styles.rowContainer,
                                {
                                    flexShrink: 0,
                                    width: BUTTON_WIDTH,
                                    padding: 0,
                                    margin: 0,
                                },
                            ], children: (0, jsx_runtime_1.jsx)(react_native_1.TouchableOpacity, { onPress: handlePress, style: styles.pressable, children: renderIcon() }) }), isOpen && ((0, jsx_runtime_1.jsxs)(react_native_1.View, { style: {
                                flex: 1,
                                flexDirection: 'row',
                                height: BUTTON_HEIGHT - 10,
                                marginLeft: 0,
                                marginRight: 0,
                            }, children: [(0, jsx_runtime_1.jsxs)(react_native_1.View, { style: {
                                        flex: 1,
                                        justifyContent: 'center',
                                        alignItems: 'flex-start',
                                        paddingLeft: 8,
                                        paddingRight: 4,
                                    }, children: [(0, jsx_runtime_1.jsx)(react_native_1.Text, { style: [
                                            styles.agentNameText,
                                            { flexShrink: 1, textAlign: 'left' },
                                        ], children: (configData === null || configData === void 0 ? void 0 : configData.agent_name) || 'Revrag' }), (0, jsx_runtime_1.jsx)(react_native_1.Text, { style: [
                                            styles.statusText,
                                            { flexShrink: 1, textAlign: 'left' },
                                        ], children: getStatusText() })] }), (0, jsx_runtime_1.jsx)(react_native_1.View, { style: {
                                        flex: 1,
                                        justifyContent: 'center',
                                        alignItems: 'center',
                                        paddingHorizontal: 4,
                                    }, children: (0, jsx_runtime_1.jsx)(audiowave_1.WaveformVisualizer, { roomRef: roomRef }) }), (0, jsx_runtime_1.jsxs)(react_native_1.View, { style: {
                                        flex: 1,
                                        justifyContent: 'center',
                                        alignItems: 'flex-end',
                                        paddingLeft: 4,
                                        paddingRight: 8,
                                    }, children: [!(tokenDetails === null || tokenDetails === void 0 ? void 0 : tokenDetails.token) && ((0, jsx_runtime_1.jsx)(react_native_1.TouchableOpacity, { onPress: handleStartCall, style: styles.startCallButton, children: (0, jsx_runtime_1.jsx)(react_native_1.Text, { style: styles.startCallText, children: (configData === null || configData === void 0 ? void 0 : configData.start_call_text) || 'Start Call' }) })), (tokenDetails === null || tokenDetails === void 0 ? void 0 : tokenDetails.token) && ((0, jsx_runtime_1.jsxs)(react_native_1.View, { style: [styles.buttonContainer], children: [(0, jsx_runtime_1.jsx)(react_native_1.TouchableOpacity, { style: styles.muteButton, onPress: handleMicToggle, children: (0, jsx_runtime_1.jsx)(react_native_1.Image, { source: {
                                                        uri: isMicMuted ? MIC_OFF_ICON : MIC_ON_ICON,
                                                    }, style: styles.buttonImage }) }), (0, jsx_runtime_1.jsx)(react_native_1.TouchableOpacity, { onPress: handleEndCall, style: styles.endCallButton, children: (0, jsx_runtime_1.jsx)(react_native_1.Image, { source: { uri: END_CALL_ICON }, style: styles.buttonImage }) })] }))] })] }))] }) }) })] }));
}
// Export default for easier imports
exports.default = OnwidButton;
|
|
@@ -0,0 +1,137 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.WaveformVisualizer = void 0;
|
|
4
|
+
const jsx_runtime_1 = require("react/jsx-runtime");
|
|
5
|
+
const react_1 = require("react");
|
|
6
|
+
const react_native_1 = require("react-native");
|
|
7
|
+
// React Native compatible waveform "simulator" hook. It does not sample real
// audio: it polls the LiveKit room for remote-participant speech and, while
// speech is detected, synthesizes bar heights from a time-based sine wave.
const useReactNativeAudioWaveform = (roomRef) => {
    const [isAudioActive, setIsAudioActive] = (0, react_1.useState)(false); // true while a remote participant appears to speak
    const intervalRef = (0, react_1.useRef)(null); // id of the 500ms room-polling interval
    const [currentHeights, setCurrentHeights] = (0, react_1.useState)(Array(10).fill(0)); // plain-number mirror of the bar targets
    // Create one Animated.Value per bar (React Native's built-in Animated API).
    const barCount = 10;
    const animatedBars = (0, react_1.useRef)(Array(barCount)
        .fill(0)
        .map(() => new react_native_1.Animated.Value(0))).current;
    (0, react_1.useEffect)(() => {
        // Poll: is there an active room connection AND is the agent talking?
        const checkAudioActivity = () => {
            const room = roomRef.current;
            if ((room === null || room === void 0 ? void 0 : room.state) !== 'connected') {
                setIsAudioActive(false);
                return;
            }
            // Check if any remote participant is currently speaking
            let isAgentSpeaking = false;
            // Loop through all remote participants
            room.remoteParticipants.forEach((participant) => {
                const audioTrackPublications = Array.from(participant.getTrackPublications().values());
                const remoteAudioTrack = audioTrackPublications.find((pub) => { var _a; return ((_a = pub.track) === null || _a === void 0 ? void 0 : _a.kind) === 'audio'; });
                // Participant counts as speaking only with an unmuted audio track
                // whose audioLevel exceeds the threshold below.
                if ((remoteAudioTrack === null || remoteAudioTrack === void 0 ? void 0 : remoteAudioTrack.track) && !(remoteAudioTrack === null || remoteAudioTrack === void 0 ? void 0 : remoteAudioTrack.isMuted)) {
                    const audioLevel = participant.audioLevel || 0;
                    if (audioLevel > 0.05) {
                        // Threshold for detecting speech
                        isAgentSpeaking = true;
                    }
                }
            });
            setIsAudioActive(isAgentSpeaking);
        };
        // Initial check
        checkAudioActivity();
        // Set up periodic checking (every 500ms) for room state changes
        intervalRef.current = setInterval(checkAudioActivity, 500);
        // Clean up on unmount
        return () => {
            if (intervalRef.current) {
                clearInterval(intervalRef.current);
                intervalRef.current = null;
            }
            setIsAudioActive(false);
        };
    }, [roomRef]);
    // Continuous smooth animation: pick new target heights every 300ms and
    // tween each bar toward its target.
    (0, react_1.useEffect)(() => {
        const animateWaveform = () => {
            // Synthesized heights while active; all bars collapse to 0 when inactive.
            const targetHeights = isAudioActive
                ? Array(barCount)
                    .fill(0)
                    .map((_, index) => {
                    // Phase-shifted |sin| per bar gives a rolling-wave look, clamped to [0.1, 1.0].
                    const timeOffset = Date.now() / 800 + index * 0.3;
                    const baseHeight = 0.5;
                    const amplitude = 0.5;
                    const height = baseHeight + amplitude * Math.abs(Math.sin(timeOffset));
                    return Math.max(0.1, Math.min(1.0, height));
                })
                : Array(barCount).fill(0); // flat when no remote speech is detected
            // Mirror the targets in state so consumers can read plain numbers.
            // NOTE(review): this setState fires every 300ms with a fresh array,
            // forcing re-renders of consumers for the hook's lifetime — consider a ref.
            setCurrentHeights(targetHeights);
            const animations = animatedBars.map((animatedValue, index) => {
                const targetHeight = targetHeights[index] || 0;
                return react_native_1.Animated.timing(animatedValue, {
                    toValue: targetHeight,
                    duration: isAudioActive ? 400 : 600, // Slower fade out when going inactive
                    useNativeDriver: false, // height is a layout prop; native driver can't animate it
                });
            });
            react_native_1.Animated.parallel(animations).start();
        };
        // Start animation immediately and repeat
        animateWaveform();
        const animationInterval = setInterval(animateWaveform, 300);
        return () => {
            clearInterval(animationInterval);
        };
    }, [isAudioActive, animatedBars]);
    return {
        animatedBars,
        currentHeights,
        isActive: isAudioActive,
    };
};
|
|
96
|
+
/**
 * WaveformVisualizer
 *
 * Renders a row of 10 animated vertical bars driven by
 * useReactNativeAudioWaveform: each bar's height interpolates the hook's
 * 0..1 animated value onto 0..25px, so bars rise while a remote participant
 * is speaking and collapse when idle. The two outermost bars on each side
 * are drawn semi-transparent for a tapered look.
 *
 * Rendering is unchanged from the previous revision; this revision removes
 * defects only:
 * - a per-render debug `console.log` of the animated values;
 * - the dead `conditionalValue` computation whose sole use was the
 *   tautological `conditionalValue === 10 ? 4 : 4` (always 4);
 * - commented-out style props.
 *
 * @param {{ roomRef: { current: object|null } }} props - ref holding the LiveKit room (or null)
 */
const WaveformVisualizer = ({ roomRef }) => {
    const { animatedBars } = useReactNativeAudioWaveform(roomRef);
    return ((0, jsx_runtime_1.jsx)(react_native_1.View, { style: {
            flexDirection: 'row',
            alignItems: 'center',
            height: '100%',
            alignSelf: 'center',
            justifyContent: 'center',
            zIndex: 1000,
        }, children: animatedBars.map((animatedHeight, idx) => ((0, jsx_runtime_1.jsx)(react_native_1.Animated.View, { style: {
                width: 4,
                borderRadius: 100,
                // Map the hook's 0..1 value onto a 0..25px bar height.
                height: animatedHeight.interpolate({
                    inputRange: [0, 1],
                    outputRange: [0, 25],
                }),
                alignSelf: 'center',
                // Fade the edge bars (first two / last two of 10).
                backgroundColor: idx <= 1 || idx >= 8 ? 'rgba(255, 255, 255, 0.5)' : 'white',
                margin: 1.5,
            } }, idx))) }));
};
exports.WaveformVisualizer = WaveformVisualizer;
|
|
@@ -0,0 +1,103 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
const jsx_runtime_1 = require("react/jsx-runtime");
|
|
4
|
+
const react_native_1 = require("@livekit/react-native");
|
|
5
|
+
const react_1 = require("react");
|
|
6
|
+
// Track audio session state globally to prevent multiple starts/stops
|
|
7
|
+
let audioSessionActive = false;
|
|
8
|
+
let audioSessionStarting = false;
|
|
9
|
+
let audioSessionStopping = false;
|
|
10
|
+
// Track connection to prevent unmounting while connected
|
|
11
|
+
let activeConnectionToken = null;
|
|
12
|
+
/**
 * Voice
 *
 * Thin wrapper around LiveKitRoom that manages the native audio session
 * lifecycle: starts the session on mount, renders LiveKitRoom only once the
 * session is up, and stops the session on unmount only when no room is still
 * connected (to avoid start/stop loops across remounts).
 *
 * @param {object} props
 * @param {string} props.url - LiveKit server URL
 * @param {string} props.token - room access token
 * @param {(state: string) => void} props.onConnected - invoked with 'connected'
 * @param {(state: string) => void} props.onDisconnected - invoked with 'disconnected'
 * @param {{ current: object|null }} props.roomRef - optional pre-built Room passed to LiveKitRoom
 */
const Voice = (props) => {
    const { url, token, onDisconnected, onConnected, roomRef } = props;
    // Seed from the module-level flag so a remount during an already-active
    // session renders LiveKitRoom immediately.
    const [audioSessionStarted, setAudioSessionStarted] = (0, react_1.useState)(audioSessionActive);
    const mountedRef = (0, react_1.useRef)(true); // guards setState after unmount
    const connectedRef = (0, react_1.useRef)(false); // true between onConnected and onDisconnected
    // Start audio session safely - module-level flags prevent multiple
    // simultaneous starts across all Voice instances.
    const startAudioSession = async () => {
        if (audioSessionActive || audioSessionStarting) {
            console.log('Audio session already active or starting, skipping');
            return;
        }
        try {
            audioSessionStarting = true;
            console.log('Starting audio session');
            await react_native_1.AudioSession.startAudioSession();
            audioSessionActive = true;
            if (mountedRef.current) {
                setAudioSessionStarted(true);
            }
        }
        catch (err) {
            console.error('Failed to start audio session:', err);
        }
        finally {
            audioSessionStarting = false;
        }
    };
    // Stop audio session safely - prevent multiple simultaneous stops.
    const stopAudioSession = async () => {
        if (!audioSessionActive || audioSessionStopping) {
            console.log('Audio session already inactive or stopping, skipping');
            return;
        }
        try {
            audioSessionStopping = true;
            console.log('Stopping audio session');
            await react_native_1.AudioSession.stopAudioSession();
            audioSessionActive = false;
        }
        catch (err) {
            console.error('Failed to stop audio session:', err);
        }
        finally {
            audioSessionStopping = false;
        }
    };
    // Setup audio session on mount; conditionally tear it down on unmount.
    (0, react_1.useEffect)(() => {
        mountedRef.current = true;
        // NOTE(review): fire-and-forget async call; errors are handled inside.
        startAudioSession();
        return () => {
            var _a;
            mountedRef.current = false;
            // IMPORTANT: Don't stop the audio session on unmount if there might be
            // an active call. This prevents audio session start/stop loops when
            // components remount.
            if (((_a = roomRef.current) === null || _a === void 0 ? void 0 : _a.state) !== 'connected' && !connectedRef.current) {
                stopAudioSession();
            }
            else {
                console.log('Skipping audio session stop because room is still connected');
            }
        };
    }, []);
    // Track connection state changes to avoid an older unmount clearing the
    // token that belongs to a newer mount.
    (0, react_1.useEffect)(() => {
        if (token) {
            activeConnectionToken = token;
        }
        return () => {
            // Only clear token when unmounting with this specific token
            if (activeConnectionToken === token) {
                activeConnectionToken = null;
            }
        };
    }, [token]);
    // Only render LiveKitRoom when the audio session is ready.
    if (!audioSessionStarted) {
        return null;
    }
    // IMPORTANT: Never return an empty fragment while connected — always render
    // the LiveKitRoom component so the connection is maintained.
    return ((0, jsx_runtime_1.jsx)(react_native_1.LiveKitRoom, { serverUrl: url, token: token, screen: false, audio: true, onConnected: () => {
            console.log('LiveKitRoom connected');
            connectedRef.current = true;
            onConnected('connected');
        }, room: roomRef.current || undefined, onDisconnected: () => {
            console.log('LiveKitRoom disconnected');
            connectedRef.current = false;
            onDisconnected('disconnected');
        } }));
};
exports.default = Voice;
|