react-native-srschat 0.1.61 → 0.1.63

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34)
  1. package/lib/commonjs/components/input.js +3 -2
  2. package/lib/commonjs/components/input.js.map +1 -1
  3. package/lib/commonjs/components/productCard.js +1 -1
  4. package/lib/commonjs/components/productCard.js.map +1 -1
  5. package/lib/commonjs/components/voice.js +3 -2
  6. package/lib/commonjs/components/voice.js.map +1 -1
  7. package/lib/commonjs/components/welcomeInput.js +3 -2
  8. package/lib/commonjs/components/welcomeInput.js.map +1 -1
  9. package/lib/commonjs/contexts/AppContext.js +4 -1
  10. package/lib/commonjs/contexts/AppContext.js.map +1 -1
  11. package/lib/commonjs/utils/audioRecorder.js +168 -164
  12. package/lib/commonjs/utils/audioRecorder.js.map +1 -1
  13. package/lib/module/components/input.js +3 -2
  14. package/lib/module/components/input.js.map +1 -1
  15. package/lib/module/components/productCard.js +1 -1
  16. package/lib/module/components/productCard.js.map +1 -1
  17. package/lib/module/components/voice.js +3 -2
  18. package/lib/module/components/voice.js.map +1 -1
  19. package/lib/module/components/welcomeInput.js +3 -2
  20. package/lib/module/components/welcomeInput.js.map +1 -1
  21. package/lib/module/contexts/AppContext.js +4 -1
  22. package/lib/module/contexts/AppContext.js.map +1 -1
  23. package/lib/module/utils/audioRecorder.js +168 -164
  24. package/lib/module/utils/audioRecorder.js.map +1 -1
  25. package/lib/typescript/components/voice.d.ts.map +1 -1
  26. package/lib/typescript/contexts/AppContext.d.ts.map +1 -1
  27. package/lib/typescript/utils/audioRecorder.d.ts.map +1 -1
  28. package/package.json +1 -1
  29. package/src/components/input.js +2 -2
  30. package/src/components/productCard.js +1 -1
  31. package/src/components/voice.js +1 -2
  32. package/src/components/welcomeInput.js +2 -2
  33. package/src/contexts/AppContext.js +2 -1
  34. package/src/utils/audioRecorder.js +184 -167
@@ -7,13 +7,9 @@ import { check, PERMISSIONS, request, RESULTS } from 'react-native-permissions';
  let resultCallback = null;
  let partialResultCallback = null;
  let silenceTimer = null;
+ let isCurrentlyRecording = false;
  let finalResult = '';
- const SILENCE_DURATION = 1500;
-
- const State = { IDLE: 'IDLE', LISTENING: 'LISTENING', FINALIZING: 'FINALIZING' };
- let state = State.IDLE;
-
- let listenersBound = false;
+ const SILENCE_DURATION = 1500; // 1.5 seconds of silence before stopping
 
  // Add this constant for AsyncStorage key
  const PERMISSION_STORAGE_KEY = '@voice_permission_status';
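Net effect of this hunk: the IDLE/LISTENING/FINALIZING state machine from 0.1.61 is collapsed into a single isCurrentlyRecording boolean, while SILENCE_DURATION (1.5 s) keeps driving the silence timeout. A minimal sketch of that silence-debounce pattern, separate from the package code (the onSilence callback is illustrative, not an export of this file):

// Sketch only: restart a countdown on every speech event and fire `onSilence`
// after SILENCE_DURATION ms without new input. `onSilence` is hypothetical.
const SILENCE_DURATION = 1500; // 1.5 seconds of silence before stopping
let silenceTimer = null;

function resetSilenceTimer(onSilence) {
  if (silenceTimer) clearTimeout(silenceTimer); // any new result cancels the pending timeout
  silenceTimer = setTimeout(onSilence, SILENCE_DURATION);
}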
@@ -34,9 +30,7 @@ export async function initVoice(onResult, onPartialResult = null) {
  resultCallback = onResult;
  partialResultCallback = onPartialResult; // Store partial callback
  finalResult = '';
-
- if (listenersBound) return true;
-
+
  // Check if Voice module is available
  if (!Voice) {
  console.error('Voice module is not available');
@@ -50,109 +44,99 @@ export async function initVoice(onResult, onPartialResult = null) {
  return false;
  }
 
+ // Remove any existing listeners
  Voice.removeAllListeners();
 
  // Set up all event listeners
- Voice.onSpeechStart = () => {
- console.log('[onSpeechStart] Setting state to LISTENING');
- state = State.LISTENING;
+ Voice.onSpeechStart = (e) => {
+ console.log('onSpeechStart: ', e);
+ isCurrentlyRecording = true;
  finalResult = '';
- clearSilenceTimer();
+
+ if (silenceTimer) {
+ clearTimeout(silenceTimer);
+ silenceTimer = null;
+ }
  };
 
- // Optional: ignore onSpeechRecognized or just log
- Voice.onSpeechRecognized = () => {};
-
- Voice.onSpeechEnd = () => {
- console.log('[onSpeechEnd] Speech ended, current state:', state);
- clearSilenceTimer();
- // Always reset to IDLE when speech ends (sessions should be considered over)
- console.log('[onSpeechEnd] Scheduling IDLE reset');
- if (Platform.OS === 'android') {
- setTimeout(() => {
- console.log('[onSpeechEnd] Android timeout - setting state to IDLE');
- state = State.IDLE;
- }, 800); // Increased delay
- } else {
- console.log('[onSpeechEnd] iOS - setting state to IDLE immediately');
- state = State.IDLE;
+ Voice.onSpeechRecognized = (e) => {
+ console.log('onSpeechRecognized: ', e);
+ if (e.isFinal) {
+ console.log('Speech recognition final');
+ handleFinalResult();
+ }
+ };
+
+ Voice.onSpeechEnd = async (e) => {
+ console.log('onSpeechEnd: ', e);
+
+ if (silenceTimer) {
+ clearTimeout(silenceTimer);
+ silenceTimer = null;
+ }
+
+ // Only handle final result if we're still recording
+ if (isCurrentlyRecording) {
+ await handleFinalResult();
  }
  };
 
- Voice.onSpeechError = (e) => {
- console.log('[onSpeechError] Error occurred, current state:', state, 'error:', e);
- clearSilenceTimer();
+ Voice.onSpeechError = async (e) => {
+ console.log('onSpeechError: ', e);
+
+ if (silenceTimer) {
+ clearTimeout(silenceTimer);
+ silenceTimer = null;
+ }
+
  const code = e.error?.code?.toString();
  const msg = e.error?.message || '';
 
- // Handle callback first
+ // Handle Android-specific errors
  if (Platform.OS === 'android' && (code === '7' || code === '5')) {
- if (finalResult && resultCallback) resultCallback(finalResult, null);
- else if (resultCallback) resultCallback(null, null);
+ if (finalResult && resultCallback) {
+ resultCallback(finalResult, null);
+ }
  } else if (!msg.includes('No speech detected') && resultCallback) {
  resultCallback(null, msg);
- } else if (resultCallback) {
- resultCallback(null, null);
  }
 
- // Errors end the session immediately, reset to IDLE with delay
- console.log('[onSpeechError] Scheduling IDLE reset');
- if (Platform.OS === 'android') {
- setTimeout(() => {
- console.log('[onSpeechError] Android timeout - setting state to IDLE');
- state = State.IDLE;
- }, 800); // Increased delay to match onSpeechEnd
- } else {
- console.log('[onSpeechError] iOS - setting state to IDLE immediately');
- state = State.IDLE;
- }
+ await cleanupVoiceSession();
  };
 
  Voice.onSpeechResults = (e) => {
- console.log('[onSpeechResults] Results received, current state:', state, 'results:', e);
- clearSilenceTimer();
+ console.log('onSpeechResults: ', e);
  if (e.value && e.value.length > 0) {
  finalResult = e.value[0];
- }
-
- // Only call callback if we haven't already (avoid double-calling)
- if (state === State.LISTENING && resultCallback) {
- console.log('[onSpeechResults] Calling callback with results');
- resultCallback(finalResult, null);
- } else {
- console.log('[onSpeechResults] Not calling callback - state:', state);
- }
-
- // On Android, we must explicitly stop to avoid session corruption
- if (Platform.OS === 'android') {
- console.log('[onSpeechResults] Android: Explicitly calling stopRecording()');
- stopRecording();
- }
-
- // Results end the session, reset to IDLE with delay
- console.log('[onSpeechResults] Scheduling IDLE reset');
- if (Platform.OS === 'android') {
- setTimeout(() => {
- console.log('[onSpeechResults] Android timeout - setting state to IDLE');
- state = State.IDLE;
- }, 800); // Increased delay
- } else {
- console.log('[onSpeechResults] iOS - setting state to IDLE immediately');
- state = State.IDLE;
+ handleSilenceDetection();
  }
  };
 
  Voice.onSpeechPartialResults = (e) => {
+ console.log('onSpeechPartialResults: ', e);
+
+ if (silenceTimer) {
+ clearTimeout(silenceTimer);
+ }
+
  if (e.value && e.value.length > 0) {
  finalResult = e.value[0];
- if (partialResultCallback) partialResultCallback(finalResult);
+
+ // Call partial callback for live transcription
+ if (partialResultCallback) {
+ partialResultCallback(finalResult);
+ }
+
  handleSilenceDetection();
  }
  };
 
- if (Platform.OS === 'android') Voice.onSpeechVolumeChanged = () => {};
+ if (Platform.OS === 'android') {
+ Voice.onSpeechVolumeChanged = (e) => {
+ console.log('onSpeechVolumeChanged: ', e);
+ };
+ }
 
- listenersBound = true;
  return true;
  } catch (error) {
  console.error('Error initializing Voice:', error);
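For context, the surface this hunk wires up is initVoice(onResult, onPartialResult) plus startRecording()/stopRecording(). A hedged usage sketch from a consuming component; only the imported names come from this file, the import path and the setTranscript handler are illustrative:

// Illustrative caller of utils/audioRecorder; handler names are hypothetical.
import { initVoice, startRecording } from '../utils/audioRecorder';

async function beginDictation(setTranscript) {
  const ready = await initVoice(
    (finalText, error) => {
      // Final transcript (or error) once speech ends or silence is detected.
      if (!error && finalText) setTranscript(finalText);
    },
    (partialText) => {
      // Live transcription while the user is still speaking.
      setTranscript(partialText);
    }
  );
  if (ready) {
    await startRecording(); // resolves false if permission or Voice.start() fails
  }
}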
@@ -166,144 +150,163 @@ const handleSilenceDetection = () => {
  }
 
  silenceTimer = setTimeout(async () => {
- if (state === State.LISTENING) {
+ if (isCurrentlyRecording) {
  await handleFinalResult();
  }
  }, SILENCE_DURATION);
  };
 
  const handleFinalResult = async () => {
- console.log('[handleFinalResult] Called, current state:', state);
- if (state !== State.LISTENING) {
- console.log('[handleFinalResult] State not LISTENING, returning');
- return;
- }
+ if (!isCurrentlyRecording) return;
 
- // Set to FINALIZING first to prevent double callbacks
- console.log('[handleFinalResult] Setting state to FINALIZING');
- state = State.FINALIZING;
+ console.log('handleFinalResult called with:', finalResult);
 
- // Call the callback with results
  if (finalResult && resultCallback) {
- console.log('[handleFinalResult] Calling callback with result:', finalResult);
  resultCallback(finalResult, null);
  }
 
- // Now stop recording (this will call Voice.stop())
- console.log('[handleFinalResult] Calling stopRecording');
+ // Stop recording first
  await stopRecording();
  };
 
- const cleanupVoiceSession = () => {
- console.log('[cleanupVoiceSession] Called, current state:', state);
- finalResult = '';
- clearSilenceTimer();
+ const cleanupVoiceSession = async () => {
+ console.log('cleanupVoiceSession called');
+ isCurrentlyRecording = false;
 
- // Add delay before allowing next session on Android
- if (Platform.OS === 'android') {
- setTimeout(() => {
- console.log('[cleanupVoiceSession] Android timeout - setting state to IDLE');
- state = State.IDLE;
- }, 800);
- } else {
- console.log('[cleanupVoiceSession] iOS - setting state to IDLE immediately');
- state = State.IDLE;
- }
- };
-
- const clearSilenceTimer = () => {
  if (silenceTimer) {
  clearTimeout(silenceTimer);
  silenceTimer = null;
  }
- };
 
- export async function startRecording() {
  try {
- console.log('[startRecording] Called, current state:', state);
+ // Check if Voice module is available
+ if (!Voice) {
+ console.log('Voice module not available during cleanup');
+ return;
+ }
 
- // On Android, destroy any lingering instance before starting
- if (Platform.OS === 'android') {
+ // Check if still recognizing
+ const isRecognizing = await Voice.isRecognizing();
+ console.log('Voice.isRecognizing() in cleanup:', isRecognizing);
+
+ if (isRecognizing) {
  try {
- console.log('[startRecording] Android: Proactively destroying Voice instance');
- await Voice.destroy();
- await new Promise(r => setTimeout(r, 100)); // Short delay for destroy to complete
+ // For iOS, use cancel for immediate termination
+ if (Platform.OS === 'ios') {
+ await Voice.cancel();
+ console.log('Voice.cancel() completed for iOS');
+ } else {
+ await Voice.stop();
+ console.log('Voice.stop() completed for Android');
+ }
+ await new Promise(resolve => setTimeout(resolve, 100));
  } catch (e) {
- console.log('[startRecording] Proactive destroy failed, may be okay:', e);
+ console.log('Error stopping/canceling in cleanup:', e);
  }
  }
 
- if (!Voice) {
- console.log('[startRecording] Voice not available');
- return false;
+ // Destroy the instance
+ try {
+ await Voice.destroy();
+ console.log('Voice.destroy() completed');
+ await new Promise(resolve => setTimeout(resolve, 200));
+ } catch (e) {
+ console.log('Error destroying in cleanup:', e);
  }
- if (state !== State.IDLE) {
- console.log('[startRecording] State not IDLE, returning false');
+
+ } catch (error) {
+ console.error('Error in cleanupVoiceSession:', error);
+ }
+
+ finalResult = '';
+ };
+
+ export async function startRecording() {
+ try {
+ console.log('startRecording called');
+
+ // Check if Voice module is available
+ if (!Voice) {
+ console.error('Voice module is not available');
  return false;
  }
+
+ // Ensure cleanup of any existing session
+ await cleanupVoiceSession();
+
+ // Small delay to ensure cleanup is complete
+ await new Promise(resolve => setTimeout(resolve, 200));
 
  const hasPermission = await requestAudioPermission();
  if (!hasPermission) {
  console.error('No permission to record audio');
  return false;
  }
-
- const recognizing = await Voice.isRecognizing();
- console.log('[startRecording] Voice.isRecognizing():', recognizing);
- if (recognizing) {
- console.log('[startRecording] Already recognizing, canceling first');
- await Voice.cancel();
- // Wait longer for cancel to take effect
- await new Promise(r => setTimeout(r, 500));
-
- // Double-check if still recognizing after cancel
- const stillRecognizing = await Voice.isRecognizing();
- console.log('[startRecording] After cancel, still recognizing:', stillRecognizing);
- if (stillRecognizing) {
- console.log('[startRecording] Still recognizing after cancel, stopping');
- try {
- await Voice.stop();
- await new Promise(r => setTimeout(r, 300));
- } catch (e) {
- console.log('[startRecording] Error stopping:', e);
- }
- }
- }
 
- console.log('[startRecording] Calling Voice.start()');
+ // Re-initialize listeners each time for iOS stability
+ await initVoice(resultCallback, partialResultCallback);
+
+ // Start recognition
  await Voice.start('en-US');
- console.log('[startRecording] Voice.start() completed, setting state to LISTENING');
- state = State.LISTENING;
+ console.log('Voice.start() completed');
+ isCurrentlyRecording = true;
  return true;
  } catch (error) {
  console.error('Error starting voice recognition:', error);
- cleanupVoiceSession();
+ await cleanupVoiceSession();
  return false;
  }
  }
 
  export async function stopRecording() {
  try {
- console.log('[stopRecording] Called, current state:', state);
- // Can be called from LISTENING or FINALIZING state
- if ((state !== State.LISTENING && state !== State.FINALIZING) || !Voice) {
- console.log('[stopRecording] Invalid state or no Voice, returning');
+ console.log('stopRecording called');
+
+ if (!isCurrentlyRecording || !Voice) {
+ console.log('Not recording or Voice not available');
  return;
  }
-
- // Only set to FINALIZING if not already there
- if (state === State.LISTENING) {
- console.log('[stopRecording] Setting state to FINALIZING');
- state = State.FINALIZING;
+
+ // Set this first to prevent race conditions
+ isCurrentlyRecording = false;
+
+ if (silenceTimer) {
+ clearTimeout(silenceTimer);
+ silenceTimer = null;
  }
-
- clearSilenceTimer();
- console.log('[stopRecording] Calling Voice.stop()');
- await Voice.stop();
- console.log('[stopRecording] Voice.stop() completed');
+
+ // Platform-specific stop
+ try {
+ if (Platform.OS === 'ios') {
+ // iOS: Use cancel for immediate termination
+ await Voice.cancel();
+ console.log('Voice.cancel() completed for iOS');
+ } else {
+ // Android: Use stop
+ await Voice.stop();
+ console.log('Voice.stop() completed for Android');
+ }
+
+ // Small delay
+ await new Promise(resolve => setTimeout(resolve, 100));
+ } catch (error) {
+ console.log('Error stopping/canceling Voice:', error);
+ }
+
+ // Then destroy
+ try {
+ await Voice.destroy();
+ console.log('Voice.destroy() completed');
+ await new Promise(resolve => setTimeout(resolve, 200));
+ } catch (error) {
+ console.log('Error destroying Voice:', error);
+ }
+
+ // Final cleanup
+ finalResult = '';
  } catch (error) {
  console.error('Error in stopRecording:', error);
- cleanupVoiceSession();
+ await cleanupVoiceSession();
  }
  }
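One detail worth noting in the rewritten stopRecording(): isCurrentlyRecording is cleared before any await, so a silence timer or onSpeechEnd event that fires during the asynchronous stop/destroy sequence sees the flag as false and handleFinalResult() becomes a no-op. A stripped-down illustration of that ordering (not the package code):

// Illustration only: flip the guard flag before the async teardown so that
// late timers and events bail out instead of re-emitting a final result.
let isCurrentlyRecording = true;

function handleFinalResult(emit) {
  if (!isCurrentlyRecording) return; // late callers become no-ops
  emit();
}

async function stop() {
  isCurrentlyRecording = false;               // 1. guard first
  await new Promise(r => setTimeout(r, 100)); // 2. then the awaited native calls
}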
@@ -311,10 +314,10 @@ export async function cancelRecording() {
  try {
  if (!Voice) return;
  await Voice.cancel();
- cleanupVoiceSession();
+ await cleanupVoiceSession();
  } catch (error) {
  console.error('Error canceling voice recognition:', error);
- cleanupVoiceSession();
+ await cleanupVoiceSession();
  }
  }
@@ -407,5 +410,19 @@ export function resetStoredPermission() {
  }
 
  export function cleanup() {
- cleanupVoiceSession();
+ if (!Voice) {
+ console.log('Voice module not available during cleanup');
+ return;
+ }
+
+ Voice.destroy().then(() => {
+ Voice.removeAllListeners();
+ cleanupVoiceSession();
+ }).catch(error => {
+ console.error('Error in cleanup:', error);
+ // Try one more time
+ if (Voice) {
+ Voice.destroy().catch(e => console.error('Final cleanup attempt failed:', e));
+ }
+ });
  }
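Since cleanup() now chains Voice.destroy(), Voice.removeAllListeners(), and cleanupVoiceSession(), the natural call site is a screen unmount. A hedged sketch, assuming the consuming component uses a React effect; the hook name and import path are illustrative:

// Illustrative call site; the hook itself is not part of this package.
import { useEffect } from 'react';
import { cleanup } from '../utils/audioRecorder';

function useVoiceTeardown() {
  useEffect(() => {
    // Destroy the native recognizer and drop listeners when the screen unmounts.
    return () => cleanup();
  }, []);
}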