react-native-srschat 0.1.18 → 0.1.19

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (74)
  1. package/README.md +1 -0
  2. package/lib/commonjs/components/email.js +5 -4
  3. package/lib/commonjs/components/email.js.map +1 -1
  4. package/lib/commonjs/components/header.js +11 -20
  5. package/lib/commonjs/components/header.js.map +1 -1
  6. package/lib/commonjs/components/input.js +9 -2
  7. package/lib/commonjs/components/input.js.map +1 -1
  8. package/lib/commonjs/components/productCard.js +5 -1
  9. package/lib/commonjs/components/productCard.js.map +1 -1
  10. package/lib/commonjs/components/progressCircle.js +99 -0
  11. package/lib/commonjs/components/progressCircle.js.map +1 -0
  12. package/lib/commonjs/components/voice.js +87 -0
  13. package/lib/commonjs/components/voice.js.map +1 -0
  14. package/lib/commonjs/components/welcomeInput.js +9 -2
  15. package/lib/commonjs/components/welcomeInput.js.map +1 -1
  16. package/lib/commonjs/hooks/stream.js +9 -9
  17. package/lib/commonjs/hooks/stream.js.map +1 -1
  18. package/lib/commonjs/layout/layout.js +3 -3
  19. package/lib/commonjs/layout/layout.js.map +1 -1
  20. package/lib/commonjs/layout/window.js +82 -4
  21. package/lib/commonjs/layout/window.js.map +1 -1
  22. package/lib/commonjs/utils/audioRecorder.js +97 -0
  23. package/lib/commonjs/utils/audioRecorder.js.map +1 -0
  24. package/lib/commonjs/utils/textToSpeech.js +45 -0
  25. package/lib/commonjs/utils/textToSpeech.js.map +1 -0
  26. package/lib/module/components/email.js +5 -4
  27. package/lib/module/components/email.js.map +1 -1
  28. package/lib/module/components/header.js +11 -20
  29. package/lib/module/components/header.js.map +1 -1
  30. package/lib/module/components/input.js +9 -2
  31. package/lib/module/components/input.js.map +1 -1
  32. package/lib/module/components/productCard.js +4 -1
  33. package/lib/module/components/productCard.js.map +1 -1
  34. package/lib/module/components/progressCircle.js +90 -0
  35. package/lib/module/components/progressCircle.js.map +1 -0
  36. package/lib/module/components/voice.js +76 -0
  37. package/lib/module/components/voice.js.map +1 -0
  38. package/lib/module/components/welcomeInput.js +9 -2
  39. package/lib/module/components/welcomeInput.js.map +1 -1
  40. package/lib/module/hooks/stream.js +9 -9
  41. package/lib/module/hooks/stream.js.map +1 -1
  42. package/lib/module/layout/layout.js +3 -3
  43. package/lib/module/layout/layout.js.map +1 -1
  44. package/lib/module/layout/window.js +82 -4
  45. package/lib/module/layout/window.js.map +1 -1
  46. package/lib/module/utils/audioRecorder.js +88 -0
  47. package/lib/module/utils/audioRecorder.js.map +1 -0
  48. package/lib/module/utils/textToSpeech.js +34 -0
  49. package/lib/module/utils/textToSpeech.js.map +1 -0
  50. package/lib/typescript/components/input.d.ts.map +1 -1
  51. package/lib/typescript/components/productCard.d.ts.map +1 -1
  52. package/lib/typescript/components/progressCircle.d.ts +3 -0
  53. package/lib/typescript/components/progressCircle.d.ts.map +1 -0
  54. package/lib/typescript/components/voice.d.ts +3 -0
  55. package/lib/typescript/components/voice.d.ts.map +1 -0
  56. package/lib/typescript/components/welcomeInput.d.ts.map +1 -1
  57. package/lib/typescript/layout/window.d.ts.map +1 -1
  58. package/lib/typescript/utils/audioRecorder.d.ts +4 -0
  59. package/lib/typescript/utils/audioRecorder.d.ts.map +1 -0
  60. package/lib/typescript/utils/textToSpeech.d.ts +2 -0
  61. package/lib/typescript/utils/textToSpeech.d.ts.map +1 -0
  62. package/package.json +4 -3
  63. package/src/components/email.js +6 -6
  64. package/src/components/header.js +11 -10
  65. package/src/components/input.js +5 -3
  66. package/src/components/productCard.js +3 -0
  67. package/src/components/progressCircle.js +86 -0
  68. package/src/components/voice.js +84 -0
  69. package/src/components/welcomeInput.js +5 -3
  70. package/src/hooks/stream.js +2 -2
  71. package/src/layout/layout.js +3 -3
  72. package/src/layout/window.js +61 -4
  73. package/src/utils/audioRecorder.js +99 -0
  74. package/src/utils/textToSpeech.js +38 -0

package/src/components/voice.js

@@ -0,0 +1,84 @@
+ // VoiceButton.js
+ import React, { useState, useContext } from 'react';
+ import { TouchableOpacity, ActivityIndicator, View, StyleSheet } from 'react-native';
+ import Ionicons from 'react-native-vector-icons/Ionicons';
+ import axios from 'axios';
+
+ import { startRecording, stopRecording, requestAudioPermission } from '../utils/audioRecorder';
+ import { AppContext } from '../contexts/AppContext';
+
+ export const VoiceButton = () => {
+   const { data, handleVoiceSend } = useContext(AppContext);
+   const [isListening, setIsListening] = useState(false);
+   const [loading, setLoading] = useState(false);
+
+   const toggleRecording = async () => {
+     const hasPermission = await requestAudioPermission();
+
+     if (!hasPermission) {
+       console.error('Permission denied');
+       return;
+     }
+
+     if (!isListening) {
+       startRecording(handleStopRecording);
+       setIsListening(true);
+     } else {
+       handleStopRecording();
+     }
+   };
+
+   const handleStopRecording = async () => {
+     setLoading(true);
+     const audioPath = await stopRecording();
+     const transcription = await transcribeAudio(audioPath);
+     setLoading(false);
+     setIsListening(false);
+     handleVoiceSend(audioPath, transcription);
+   };
+
+   const transcribeAudio = async (audioPath) => {
+     try {
+       const formData = new FormData();
+       formData.append('file', {
+         uri: audioPath,
+         type: 'audio/wav',
+         name: 'audio.wav',
+       });
+       formData.append('model', 'whisper-1');
+
+       const response = await axios.post(
+         'https://api.openai.com/v1/audio/transcriptions',
+         formData,
+         {
+           headers: {
+             Authorization: `Bearer ${data.openai_key}`,
+             'Content-Type': 'multipart/form-data',
+           },
+         }
+       );
+       return response.data.text;
+     } catch (error) {
+       console.error('Error transcribing audio:', error);
+       return '';
+     }
+   };
+
+   return (
+     <TouchableOpacity style={styles.button} onPress={toggleRecording} disabled={loading}>
+       {loading ? (
+         <ActivityIndicator size="small" color="#8E8E93" />
+       ) : (
+         <Ionicons name={isListening ? 'stop-circle' : 'mic-outline'} size={24} color="#8E8E93" />
+       )}
+     </TouchableOpacity>
+   );
+ };
+
+ const styles = StyleSheet.create({
+   button: {
+     justifyContent: 'center',
+     alignItems: 'center',
+   },
+ });
+
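
The new VoiceButton delegates the audio work to the new utils: startRecording(handleStopRecording) lets the silence detector stop the recording automatically, and handleVoiceSend(audioPath, transcription) is expected to come from AppContext, which also exposes handleSend (see the welcomeInput hunk below). A minimal sketch of what such a handler might look like — illustrative only; the real implementation lives in the package's context and is not part of this diff:

// Illustrative only: a handleVoiceSend implementation the package's AppContext
// might supply. It reuses the existing text send path (handleSend).
const makeHandleVoiceSend = (handleSend) => (audioPath, transcription) => {
  if (transcription && transcription.trim()) {
    handleSend(transcription);      // forward the Whisper text as a normal message
  } else {
    console.warn('No transcription produced for recording at', audioPath);
  }
};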

package/src/components/welcomeInput.js

@@ -14,7 +14,7 @@ import { AppContext } from '../contexts/AppContext';
  import Ionicons from 'react-native-vector-icons/Ionicons';

  export const WelcomeInput = ({ onProductCardClick, onAddToCartClick }) => {
-   const { handleSend, input, setInput, showModal, theme } = useContext(AppContext);
+   const { handleSend, input, setInput, showModal, theme, data } = useContext(AppContext);

    return (
      <View style={styles.inputContainer}>
@@ -26,9 +26,11 @@ export const WelcomeInput = ({ onProductCardClick, onAddToCartClick }) => {
        placeholderTextColor="#999"
        multiline
      />
-     {/* <TouchableOpacity style={styles.inputButton}>
+     {data.openai_key &&
+       <TouchableOpacity style={styles.inputButton}>
          <Ionicons name="mic-outline" size={24} color="#8E8E93" />
-     </TouchableOpacity> */}
+       </TouchableOpacity>
+     }
      <TouchableOpacity
        style={styles.sendButton}
        onPress={() => handleSend(input)}

package/src/hooks/stream.js

@@ -80,7 +80,7 @@ export function useWebSocketMessage() {
        console.log(response)
      }
      switch (response.type) {
-       /* case 'middle_message':
+       case 'middle_message':
          const middleMessage = {
            type: "middle",
            text: response.message,
@@ -88,7 +88,7 @@ export function useWebSocketMessage() {
            product_cards: "False",
          }
          setMessages([...messages, middleMessage])
-         break; */
+         break;
        case 'message':
          if (response.product_cards == "False" || response.product_cards == false ) {
            setGhostMessage(false);
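
For orientation, the re-enabled branch only reads response.type and response.message; a sketch of the assumed frame it handles (the server contract itself is not part of this diff):

// Assumed middle_message frame; only the fields the handler above actually reads
// are shown, the full server contract is not visible in this diff.
const exampleFrame = {
  type: 'middle_message',
  message: 'Checking the catalog for matching products…',
};
// The case above maps it to a chat entry with type: "middle" and
// text: exampleFrame.message, which the new bubble in window.js renders.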

package/src/layout/layout.js

@@ -133,12 +133,12 @@ const styles = StyleSheet.create({
    left: 0,
    right: 0,
    bottom: 0,
-   top: 140,
+   top: 0,
    pointerEvents: 'box-none',
-   borderTopWidth: 1,
+   /* borderTopWidth: 1,
    borderTopColor: '#DDD',
    borderTopLeftRadius: 16,
-   borderTopRightRadius: 16,
+   borderTopRightRadius: 16, */
    overflow: 'hidden',
  },
  outsideTouchable: {

package/src/layout/window.js

@@ -11,10 +11,11 @@ import { useWebSocketMessage } from '../hooks/stream';
  import { ProductCard } from '../components/productCard'
  import Markdown from 'react-native-markdown-display';
  import { Feedback } from '../components/feedback';
+ import { ProgressCircle } from '../components/progressCircle';

  export const ChatWindow = ({ panHandlers }) => {
    const { handleSend, messages, input, setInput, ghostMessage, handleButtonClick,
-     onProductCardClick, onAddToCartClick, uiConfig
+     onProductCardClick, onAddToCartClick, uiConfig, ghostCard, typingIndicator
    } = useContext(AppContext);

    const scrollViewRef = useRef(null);
@@ -80,6 +81,15 @@ export const ChatWindow = ({ panHandlers }) => {
        </View>
      )}

+     {msg.type == "middle" && (
+       <View style={[styles.middleMessageBubble, styles.middleMessage]}>
+         <Ionicons name="sparkles-outline" size={20} style={{marginRight: 10}}/>
+         <Markdown style={{ body: { color: msg.type === "user" ? "#ffffff" : "#161616",fontSize: 16, lineHeight: 22 }}}>
+           {msg.text}
+         </Markdown>
+       </View>
+     )}
+
      {msg.products && msg.products.length > 0 &&
        msg.products.map((prod, index) => (
          <View key={index} style={styles.productCardWrapper}>
@@ -87,8 +97,6 @@ export const ChatWindow = ({ panHandlers }) => {
          </View>
        ))}

-     {/* "https://player.vimeo.com/video/857307005?h=441fb14207&badge=0&autopause=0&player_id=0&app_id=58479"
-     */}
      {msg.resource && msg.resource.length > 0 && msg.resource_type == "video" &&
        <TouchableOpacity style={styles.resourceButton} onPress={() => openLink(msg.resource)}>
          <Text style={styles.resourceText}>Watch Video</Text>
@@ -110,6 +118,18 @@ export const ChatWindow = ({ panHandlers }) => {
          <Animated.View style={[styles.ghostBar, styles.ghostBarMedium, { opacity: fadeAnim }]} />
        </View>
      )}
+
+     {ghostCard && i === messages.length - 1 && (
+       <View style={styles.ghostCardContainer}>
+         <View style={styles.ghostSquare} />
+         <View style={styles.ghostBarsContainer}>
+           <Animated.View style={[styles.ghostBar, styles.ghostBarShort, { opacity: fadeAnim }]} />
+           <Animated.View style={[styles.ghostBar, styles.ghostBarLong, { opacity: fadeAnim }]} />
+           <Animated.View style={[styles.ghostBar, styles.ghostBarMedium, { opacity: fadeAnim }]} />
+         </View>
+       </View>
+     )}
+
      </View>
    ))}
    </KeyboardAwareScrollView>
@@ -124,7 +144,10 @@ export const ChatWindow = ({ panHandlers }) => {
        onAddToCartClick={onAddToCartClick}
      />
    }
-   <ChatInput/>
+   {!typingIndicator ?
+     <ChatInput/> :
+     <ProgressCircle />
+   }
    </KeyboardAvoidingView>
  </View>
  );
@@ -199,6 +222,26 @@ const styles = StyleSheet.create({
    ghostBarLong: {
      width: "100%",
    },
+   ghostCardContainer: {
+     flexDirection: "row", // Arrange elements in a row
+     alignItems: "center", // Vertically align items
+     width: "100%",
+     backgroundColor: "#FFFFFF",
+     borderRadius: 10,
+     borderTopLeftRadius: 0,
+     padding: 14,
+     marginVertical: 5,
+   },
+   ghostSquare: {
+     width: "25%", // Takes up 25% of the container
+     aspectRatio: 1, // Makes it a square
+     backgroundColor: "#ebebeb", // Adjust color if needed
+     borderRadius: 8,
+   },
+   ghostBarsContainer: {
+     width: "75%", // Takes up 75% of the container
+     paddingLeft: 10, // Adds some spacing from the square
+   },
    resourceButton: {
      flexDirection: 'row',
      alignItems: 'center',
@@ -214,6 +257,20 @@ const styles = StyleSheet.create({
      fontSize: 16,
      marginRight: 8,
    },
+   middleMessageBubble:{
+     padding: 6,
+     paddingHorizontal: 16,
+     borderRadius: 12,
+     marginBottom: 5,
+     flexDirection: 'row',
+     alignItems: 'center'
+   },
+   middleMessage:{
+     color: '#161616',
+     alignSelf: 'flex-start',
+     backgroundColor: '#e0f4fc', //'#e0f4fc',
+     width: '100%',
+   }
  });

  {/* <Testing

package/src/utils/audioRecorder.js

@@ -0,0 +1,99 @@
+ // audioRecorder.js
+ import AudioRecorderPlayer from 'react-native-audio-recorder-player';
+ import { Platform, PermissionsAndroid } from 'react-native';
+ import { NativeModules } from 'react-native';
+
+ const audioRecorderPlayer = new AudioRecorderPlayer();
+
+ let silenceTimer = null;
+ let rmsValues = [];
+ const SILENCE_RMS_THRESHOLD = 0.02;
+ const SILENCE_LENGTH_MS = 2000;
+
+ export async function startRecording(handleStopRecording) {
+
+   audioRecorderPlayer = new AudioRecorderPlayer();
+
+   rmsValues = [];
+   audioRecorderPlayer.startRecorder();
+
+   audioRecorderPlayer.addRecordBackListener((e) => {
+     const rms = Math.sqrt(
+       e.currentMetering.reduce((sum, value) => sum + Math.pow(value / 100, 2), 0) / e.currentMetering.length
+     );
+     rmsValues.push(rms);
+
+     // Detect silence
+     if (rms > SILENCE_RMS_THRESHOLD) {
+       if (silenceTimer) {
+         clearTimeout(silenceTimer);
+         silenceTimer = null;
+       }
+     } else if (!silenceTimer) {
+       silenceTimer = setTimeout(() => {
+         handleStopRecording();
+       }, SILENCE_LENGTH_MS);
+     }
+   });
+   console.log('Recording started...');
+ }
+
+ export async function stopRecording() {
+   if (!audioRecorderPlayer) {
+     console.error('AudioRecorderPlayer instance is null');
+     return null;
+   }
+   if (silenceTimer) {
+     clearTimeout(silenceTimer);
+   }
+   const result = await audioRecorderPlayer.stopRecorder();
+   console.log('Recording stopped:', result);
+   return result;
+ }
+
+ export async function requestAudioPermission() {
+   if (Platform.OS === 'android') {
+     return await requestAndroidPermission();
+   } else if (Platform.OS === 'ios') {
+     return await requestIOSPermission();
+   }
+ }
+
+ // ✅ Android: Request Microphone Permission
+ async function requestAndroidPermission() {
+   try {
+     const granted = await PermissionsAndroid.request(
+       PermissionsAndroid.PERMISSIONS.RECORD_AUDIO,
+       {
+         title: 'Microphone Permission',
+         message: 'This app needs access to your microphone to record audio.',
+         buttonPositive: 'OK',
+         buttonNegative: 'Cancel',
+       }
+     );
+
+     return granted === PermissionsAndroid.RESULTS.GRANTED;
+   } catch (error) {
+     console.error('Error requesting microphone permission:', error);
+     return false;
+   }
+ }
+
+ // ✅ iOS: Request Microphone Permission
+ async function requestIOSPermission() {
+   try {
+     const { AVAudioSession } = NativeModules;
+     if (AVAudioSession) {
+       await AVAudioSession.requestRecordPermission((granted) => {
+         console.log('iOS Microphone Permission:', granted);
+         return granted;
+       });
+     } else {
+       console.warn('AVAudioSession not available');
+       return false;
+     }
+   } catch (error) {
+     console.error('Error requesting microphone permission on iOS:', error);
+     return false;
+   }
+ }
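
As published, startRecording reassigns audioRecorderPlayer, which is declared with const and would throw at runtime, and it treats e.currentMetering as an array, whereas react-native-audio-recorder-player's record-back event exposes it as a single metering value (when metering is enabled). A minimal sketch of the same silence-stop idea under those assumptions — threshold and argument choices are illustrative, not the package's:

// Sketch: silence-based auto-stop with `let` so a fresh recorder can be created
// per recording, and currentMetering treated as one dB reading (assumption).
import AudioRecorderPlayer from 'react-native-audio-recorder-player';

let recorder = null;
let silenceTimer = null;
const SILENCE_DB_THRESHOLD = -50;   // illustrative dB cutoff; tune per device
const SILENCE_LENGTH_MS = 2000;

export async function startRecording(onSilence) {
  recorder = new AudioRecorderPlayer();
  // Third argument enables metering so record-back events carry currentMetering.
  await recorder.startRecorder(undefined, undefined, true);
  recorder.addRecordBackListener((e) => {
    const level = e.currentMetering;        // assumed single dB level, roughly -160..0
    if (typeof level !== 'number') return;  // metering unavailable on this platform/version
    if (level > SILENCE_DB_THRESHOLD) {
      if (silenceTimer) {
        clearTimeout(silenceTimer);
        silenceTimer = null;
      }
    } else if (!silenceTimer) {
      silenceTimer = setTimeout(onSilence, SILENCE_LENGTH_MS);
    }
  });
}

export async function stopRecording() {
  if (!recorder) return null;
  if (silenceTimer) {
    clearTimeout(silenceTimer);
    silenceTimer = null;
  }
  recorder.removeRecordBackListener();
  return recorder.stopRecorder();           // resolves to the recorded file's URI
}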

package/src/utils/textToSpeech.js

@@ -0,0 +1,38 @@
+ // textToSpeech.js
+ import React,{ useState, useContext} from 'react';
+ import axios from 'axios';
+ import Sound from 'react-native-sound';
+ import { AppContext } from '../contexts/AppContext';
+
+ export const TextToSpeech = async (inputText) => {
+   const { data } = useContext(AppContext)
+   try {
+     const response = await axios.post(
+       'https://api.openai.com/v1/audio/speech',
+       {
+         model: 'tts-1',
+         voice: 'alloy',
+         input: inputText,
+       },
+       {
+         headers: {
+           Authorization: `Bearer ${data.openai_key}`,
+           'Content-Type': 'application/json',
+         },
+         responseType: 'arraybuffer',
+       }
+     );
+
+     const audioFile = `data:audio/mp3;base64,${Buffer.from(response.data).toString('base64')}`;
+
+     const sound = new Sound(audioFile, null, (error) => {
+       if (error) {
+         console.error('Error playing sound:', error);
+       } else {
+         sound.play();
+       }
+     });
+   } catch (error) {
+     console.error('Error generating TTS:', error);
+   }
+ };
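
Note that TextToSpeech is a plain async helper rather than a component, so calling useContext inside it breaks the rules of hooks, and Buffer is not a global in React Native without a polyfill. A hedged sketch of the same request with the key passed in by the caller — the buffer polyfill and the data-URI playback path are assumptions, not something this diff declares:

// Sketch: same OpenAI TTS request, restructured so no hook is needed.
// The caller (a component that reads `data` from AppContext) passes the key,
// e.g. speak(msg.text, data.openai_key) from an event handler.
import axios from 'axios';
import Sound from 'react-native-sound';
import { Buffer } from 'buffer';   // assumed polyfill; not declared by this package

export const speak = async (inputText, openaiKey) => {
  const response = await axios.post(
    'https://api.openai.com/v1/audio/speech',
    { model: 'tts-1', voice: 'alloy', input: inputText },
    {
      headers: {
        Authorization: `Bearer ${openaiKey}`,
        'Content-Type': 'application/json',
      },
      responseType: 'arraybuffer',
    }
  );

  // Whether react-native-sound accepts a base64 data URI varies by platform;
  // writing the bytes to a file first is the safer route in practice.
  const audioFile = `data:audio/mp3;base64,${Buffer.from(response.data).toString('base64')}`;
  const sound = new Sound(audioFile, null, (error) => {
    if (error) {
      console.error('Error playing sound:', error);
      return;
    }
    sound.play();
  });
};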