react-native-srschat 0.1.59 → 0.1.61

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -3,13 +3,17 @@
  import { Platform } from 'react-native';
  import Voice from '@react-native-voice/voice';
  import { check, PERMISSIONS, request, RESULTS } from 'react-native-permissions';
- import useAsyncStorage from '../hooks/useAsyncStorage';

  let resultCallback = null;
+ let partialResultCallback = null;
  let silenceTimer = null;
- let isCurrentlyRecording = false;
  let finalResult = '';
- const SILENCE_DURATION = 1500; // 1.5 seconds of silence before stopping
+ const SILENCE_DURATION = 1500;
+
+ const State = { IDLE: 'IDLE', LISTENING: 'LISTENING', FINALIZING: 'FINALIZING' };
+ let state = State.IDLE;
+
+ let listenersBound = false;

  // Add this constant for AsyncStorage key
  const PERMISSION_STORAGE_KEY = '@voice_permission_status';
@@ -24,12 +28,15 @@ export function setPermissionStatusHandlers(getter, setter) {
    permissionStatusSetter = setter;
  }

- // Initialize Voice handlers
- export async function initVoice(onResult) {
+ // Initialize Voice handlers - modified to support live transcription
+ export async function initVoice(onResult, onPartialResult = null) {
    try {
      resultCallback = onResult;
+     partialResultCallback = onPartialResult; // Store partial callback
      finalResult = '';
-
+
+     if (listenersBound) return true;
+
      // Check if Voice module is available
      if (!Voice) {
        console.error('Voice module is not available');
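
The initVoice signature now takes an optional second callback that receives partial transcripts as they arrive, and the final-result callback is invoked with a (text, error) pair instead of a single argument. A minimal consumer sketch follows; the import path is an assumption about how the package exposes this module, and the logging is illustrative only:

// Sketch only: adjust the import path to wherever this package exports its
// voice service; the path below is assumed, not taken from the diff.
import { initVoice } from 'react-native-srschat';

async function setUpVoice() {
  return initVoice(
    (text, error) => {
      // Final-result callback: (finalResult, null) on success,
      // (null, message) on a reportable error, and (null, null) when
      // no speech was detected or the session ended silently.
      if (error) console.warn('Recognition error:', error);
      else if (text) console.log('Final transcript:', text);
    },
    (partial) => {
      // New optional live-transcription callback.
      console.log('Partial transcript:', partial);
    }
  );
}
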
@@ -43,91 +50,109 @@ export async function initVoice(onResult) {
        return false;
      }

+     Voice.removeAllListeners();
+
      // Set up all event listeners
-     Voice.onSpeechStart = (e) => {
-       console.log('onSpeechStart: ', e);
-       isCurrentlyRecording = true;
+     Voice.onSpeechStart = () => {
+       console.log('[onSpeechStart] Setting state to LISTENING');
+       state = State.LISTENING;
        finalResult = '';
-
-       if (silenceTimer) {
-         clearTimeout(silenceTimer);
-         silenceTimer = null;
-       }
+       clearSilenceTimer();
      };

-     Voice.onSpeechRecognized = (e) => {
-       console.log('onSpeechRecognized: ', e);
-       if (e.isFinal) {
-         console.log('Speech recognition final');
-         handleFinalResult();
+     // Optional: ignore onSpeechRecognized or just log
+     Voice.onSpeechRecognized = () => {};
+
+     Voice.onSpeechEnd = () => {
+       console.log('[onSpeechEnd] Speech ended, current state:', state);
+       clearSilenceTimer();
+       // Always reset to IDLE when speech ends (sessions should be considered over)
+       console.log('[onSpeechEnd] Scheduling IDLE reset');
+       if (Platform.OS === 'android') {
+         setTimeout(() => {
+           console.log('[onSpeechEnd] Android timeout - setting state to IDLE');
+           state = State.IDLE;
+         }, 800); // Increased delay
+       } else {
+         console.log('[onSpeechEnd] iOS - setting state to IDLE immediately');
+         state = State.IDLE;
        }
      };

-     Voice.onSpeechEnd = async (e) => {
-       console.log('onSpeechEnd: ', e);
+     Voice.onSpeechError = (e) => {
+       console.log('[onSpeechError] Error occurred, current state:', state, 'error:', e);
+       clearSilenceTimer();
+       const code = e.error?.code?.toString();
+       const msg = e.error?.message || '';

-       if (silenceTimer) {
-         clearTimeout(silenceTimer);
-         silenceTimer = null;
-       }
-
-       // Only handle final result if we're still recording
-       if (isCurrentlyRecording) {
-         await handleFinalResult();
+       // Handle callback first
+       if (Platform.OS === 'android' && (code === '7' || code === '5')) {
+         if (finalResult && resultCallback) resultCallback(finalResult, null);
+         else if (resultCallback) resultCallback(null, null);
+       } else if (!msg.includes('No speech detected') && resultCallback) {
+         resultCallback(null, msg);
+       } else if (resultCallback) {
+         resultCallback(null, null);
        }
-     };
-
-     Voice.onSpeechError = async (e) => {
-       // console.error('onSpeechError: ', e);

-       if (silenceTimer) {
-         clearTimeout(silenceTimer);
-         silenceTimer = null;
+       // Errors end the session immediately, reset to IDLE with delay
+       console.log('[onSpeechError] Scheduling IDLE reset');
+       if (Platform.OS === 'android') {
+         setTimeout(() => {
+           console.log('[onSpeechError] Android timeout - setting state to IDLE');
+           state = State.IDLE;
+         }, 800); // Increased delay to match onSpeechEnd
+       } else {
+         console.log('[onSpeechError] iOS - setting state to IDLE immediately');
+         state = State.IDLE;
        }
-
-       // Check for "No speech detected" error
-       const isNoSpeechError = e.error?.code === "recognition_fail" &&
-         e.error?.message?.includes("No speech detected");
-
-       await cleanupVoiceSession();
-
-       // Only send error to callback if it's not a "No speech detected" error
-       // if (!isNoSpeechError) {
-       //   resultCallback(null, e.error?.message || 'Speech recognition error');
-       // } else {
-       //   console.log('No speech detected, ignoring error');
-       //   // Optionally, call the callback with null parameters or a special indicator
-       //   resultCallback(null, null); // This won't trigger an error alert in the component
-       // }
      };

      Voice.onSpeechResults = (e) => {
-       console.log('onSpeechResults: ', e);
+       console.log('[onSpeechResults] Results received, current state:', state, 'results:', e);
+       clearSilenceTimer();
        if (e.value && e.value.length > 0) {
          finalResult = e.value[0];
-         handleSilenceDetection();
        }
-     };
-
-     Voice.onSpeechPartialResults = (e) => {
-       console.log('onSpeechPartialResults: ', e);

-       if (silenceTimer) {
-         clearTimeout(silenceTimer);
+       // Only call callback if we haven't already (avoid double-calling)
+       if (state === State.LISTENING && resultCallback) {
+         console.log('[onSpeechResults] Calling callback with results');
+         resultCallback(finalResult, null);
+       } else {
+         console.log('[onSpeechResults] Not calling callback - state:', state);
        }

+       // On Android, we must explicitly stop to avoid session corruption
+       if (Platform.OS === 'android') {
+         console.log('[onSpeechResults] Android: Explicitly calling stopRecording()');
+         stopRecording();
+       }
+
+       // Results end the session, reset to IDLE with delay
+       console.log('[onSpeechResults] Scheduling IDLE reset');
+       if (Platform.OS === 'android') {
+         setTimeout(() => {
+           console.log('[onSpeechResults] Android timeout - setting state to IDLE');
+           state = State.IDLE;
+         }, 800); // Increased delay
+       } else {
+         console.log('[onSpeechResults] iOS - setting state to IDLE immediately');
+         state = State.IDLE;
+       }
+     };
+
+     Voice.onSpeechPartialResults = (e) => {
        if (e.value && e.value.length > 0) {
          finalResult = e.value[0];
+         if (partialResultCallback) partialResultCallback(finalResult);
          handleSilenceDetection();
        }
      };

-     if (Platform.OS === 'android') {
-       Voice.onSpeechVolumeChanged = (e) => {
-         console.log('onSpeechVolumeChanged: ', e);
-       };
-     }
+     if (Platform.OS === 'android') Voice.onSpeechVolumeChanged = () => {};

+     listenersBound = true;
      return true;
    } catch (error) {
      console.error('Error initializing Voice:', error);
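
Because onSpeechEnd, onSpeechError, and onSpeechResults only return the module to IDLE after an 800 ms timeout on Android, a startRecording call made immediately after a session ends will find the state not yet back to IDLE and return false. A small retry wrapper is one way a caller could absorb that window; this helper is illustrative and not part of the package:

// Illustrative helper (not in the package): retries startRecording while the
// module is still counting down to IDLE after a previous Android session.
import { startRecording } from 'react-native-srschat'; // import path assumed

async function startRecordingWithRetry(retries = 3, delayMs = 300) {
  for (let attempt = 0; attempt <= retries; attempt += 1) {
    const started = await startRecording();
    if (started) return true;
    // startRecording() returns false when state is not IDLE or when the
    // microphone permission is missing, so cap the number of retries.
    await new Promise((resolve) => setTimeout(resolve, delayMs));
  }
  return false;
}
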
@@ -141,139 +166,144 @@ const handleSilenceDetection = () => {
    }

    silenceTimer = setTimeout(async () => {
-     if (isCurrentlyRecording) {
+     if (state === State.LISTENING) {
        await handleFinalResult();
      }
    }, SILENCE_DURATION);
  };

  const handleFinalResult = async () => {
-   if (!isCurrentlyRecording) return;
+   console.log('[handleFinalResult] Called, current state:', state);
+   if (state !== State.LISTENING) {
+     console.log('[handleFinalResult] State not LISTENING, returning');
+     return;
+   }
+
+   // Set to FINALIZING first to prevent double callbacks
+   console.log('[handleFinalResult] Setting state to FINALIZING');
+   state = State.FINALIZING;

-   if (finalResult) {
-     resultCallback(finalResult);
+   // Call the callback with results
+   if (finalResult && resultCallback) {
+     console.log('[handleFinalResult] Calling callback with result:', finalResult);
+     resultCallback(finalResult, null);
    }

-   // Stop recording first
+   // Now stop recording (this will call Voice.stop())
+   console.log('[handleFinalResult] Calling stopRecording');
    await stopRecording();
-
-   // Then clean up the session
-   await cleanupVoiceSession();
  };

- const cleanupVoiceSession = async () => {
-   isCurrentlyRecording = false;
+ const cleanupVoiceSession = () => {
+   console.log('[cleanupVoiceSession] Called, current state:', state);
+   finalResult = '';
+   clearSilenceTimer();

+   // Add delay before allowing next session on Android
+   if (Platform.OS === 'android') {
+     setTimeout(() => {
+       console.log('[cleanupVoiceSession] Android timeout - setting state to IDLE');
+       state = State.IDLE;
+     }, 800);
+   } else {
+     console.log('[cleanupVoiceSession] iOS - setting state to IDLE immediately');
+     state = State.IDLE;
+   }
+ };
+
+ const clearSilenceTimer = () => {
    if (silenceTimer) {
      clearTimeout(silenceTimer);
      silenceTimer = null;
    }
+ };

+ export async function startRecording() {
    try {
-     // Check if Voice module is available
-     if (!Voice) {
-       console.log('Voice module not available during cleanup');
-       return;
-     }
+     console.log('[startRecording] Called, current state:', state);

-     // First try to stop if still recognizing
-     const isRecognizing = await Voice.isRecognizing();
-     if (isRecognizing) {
+     // On Android, destroy any lingering instance before starting
+     if (Platform.OS === 'android') {
        try {
-         await Voice.stop();
-         await new Promise(resolve => setTimeout(resolve, 100));
-       } catch (e) {
-         console.error('Error stopping in cleanup:', e);
-       }
-     }
-
-     // Then force destroy
-     await Voice.destroy();
-     await new Promise(resolve => setTimeout(resolve, 300));
-
-     // Double check and force destroy again if needed
-     const stillRecognizing = await Voice.isRecognizing();
-     if (stillRecognizing) {
-       await Voice.destroy();
-       await new Promise(resolve => setTimeout(resolve, 300));
-     }
-   } catch (error) {
-     console.error('Error in cleanupVoiceSession:', error);
-     // Final attempt to destroy on error
-     try {
-       if (Voice) {
+         console.log('[startRecording] Android: Proactively destroying Voice instance');
          await Voice.destroy();
+         await new Promise(r => setTimeout(r, 100)); // Short delay for destroy to complete
+       } catch (e) {
+         console.log('[startRecording] Proactive destroy failed, may be okay:', e);
        }
-     } catch (e) {
-       console.error('Final destroy attempt failed:', e);
      }
-   }

-   finalResult = '';
- };
-
- export async function startRecording() {
-   try {
-     // Check if Voice module is available
      if (!Voice) {
-       console.error('Voice module is not available');
+       console.log('[startRecording] Voice not available');
+       return false;
+     }
+     if (state !== State.IDLE) {
+       console.log('[startRecording] State not IDLE, returning false');
        return false;
      }
-
-     // Ensure cleanup of any existing session
-     await cleanupVoiceSession();

      const hasPermission = await requestAudioPermission();
      if (!hasPermission) {
        console.error('No permission to record audio');
        return false;
      }
+
+     const recognizing = await Voice.isRecognizing();
+     console.log('[startRecording] Voice.isRecognizing():', recognizing);
+     if (recognizing) {
+       console.log('[startRecording] Already recognizing, canceling first');
+       await Voice.cancel();
+       // Wait longer for cancel to take effect
+       await new Promise(r => setTimeout(r, 500));
+
+       // Double-check if still recognizing after cancel
+       const stillRecognizing = await Voice.isRecognizing();
+       console.log('[startRecording] After cancel, still recognizing:', stillRecognizing);
+       if (stillRecognizing) {
+         console.log('[startRecording] Still recognizing after cancel, stopping');
+         try {
+           await Voice.stop();
+           await new Promise(r => setTimeout(r, 300));
+         } catch (e) {
+           console.log('[startRecording] Error stopping:', e);
+         }
+       }
+     }

+     console.log('[startRecording] Calling Voice.start()');
      await Voice.start('en-US');
-     isCurrentlyRecording = true;
+     console.log('[startRecording] Voice.start() completed, setting state to LISTENING');
+     state = State.LISTENING;
      return true;
    } catch (error) {
      console.error('Error starting voice recognition:', error);
-     await cleanupVoiceSession();
+     cleanupVoiceSession();
      return false;
    }
  }

  export async function stopRecording() {
    try {
-     if (!isCurrentlyRecording || !Voice) return;
-
-     // Set this first to prevent race conditions
-     isCurrentlyRecording = false;
-
-     if (silenceTimer) {
-       clearTimeout(silenceTimer);
-       silenceTimer = null;
-     }
-
-     // First try to stop
-     try {
-       await Voice.stop();
-       // Wait a bit for stop to complete
-       await new Promise(resolve => setTimeout(resolve, 100));
-     } catch (error) {
-       console.error('Error stopping Voice:', error);
+     console.log('[stopRecording] Called, current state:', state);
+     // Can be called from LISTENING or FINALIZING state
+     if ((state !== State.LISTENING && state !== State.FINALIZING) || !Voice) {
+       console.log('[stopRecording] Invalid state or no Voice, returning');
+       return;
      }
-
-     // Then force destroy
-     try {
-       await Voice.destroy();
-       await new Promise(resolve => setTimeout(resolve, 300));
-     } catch (error) {
-       console.error('Error destroying Voice:', error);
+
+     // Only set to FINALIZING if not already there
+     if (state === State.LISTENING) {
+       console.log('[stopRecording] Setting state to FINALIZING');
+       state = State.FINALIZING;
      }
-
-     // Final cleanup
-     await cleanupVoiceSession();
+
+     clearSilenceTimer();
+     console.log('[stopRecording] Calling Voice.stop()');
+     await Voice.stop();
+     console.log('[stopRecording] Voice.stop() completed');
    } catch (error) {
      console.error('Error in stopRecording:', error);
-     // Force cleanup on error
-     await cleanupVoiceSession();
+     cleanupVoiceSession();
    }
  }

@@ -281,10 +311,10 @@ export async function cancelRecording() {
    try {
      if (!Voice) return;
      await Voice.cancel();
-     await cleanupVoiceSession();
+     cleanupVoiceSession();
    } catch (error) {
      console.error('Error canceling voice recognition:', error);
-     await cleanupVoiceSession();
+     cleanupVoiceSession();
    }
  }

@@ -319,8 +349,6 @@ export async function requestAudioPermission() {

  async function requestAndroidPermission() {
    try {
-
-
      // Request microphone permission
      const micPermission = await request(PERMISSIONS.ANDROID.RECORD_AUDIO);
      if (micPermission !== RESULTS.GRANTED) {
@@ -328,12 +356,18 @@ async function requestAndroidPermission() {
        return false;
      }

-     // Skip checking speech recognition services which is causing errors
-     const services = await Voice.getSpeechRecognitionServices();
-     if (!services || services.length === 0) {
-       console.error('No speech recognition services available');
-       return false;
+     // Check speech recognition services
+     try {
+       const services = await Voice.getSpeechRecognitionServices();
+       if (!services || services.length === 0) {
+         console.error('No speech recognition services available');
+         return false;
+       }
+     } catch (e) {
+       console.log('Error checking speech services:', e);
+       // Continue anyway - some devices report error but work fine
      }
+
      return true;
    } catch (error) {
      console.error('Error requesting Android permission:', error);
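
The permission flow also relies on the handlers registered through setPermissionStatusHandlers(getter, setter) and the '@voice_permission_status' storage key shown earlier in the diff. One plausible wiring backs both handlers with AsyncStorage; the exact values the module expects are not visible in this diff, so treat the sketch below as an assumption:

// Illustrative wiring only: the getter/setter semantics and the import path
// are assumptions, not confirmed by the diff.
import AsyncStorage from '@react-native-async-storage/async-storage';
import { setPermissionStatusHandlers } from 'react-native-srschat'; // path assumed

const PERMISSION_KEY = '@voice_permission_status';

setPermissionStatusHandlers(
  async () => AsyncStorage.getItem(PERMISSION_KEY),                       // getter
  async (status) => AsyncStorage.setItem(PERMISSION_KEY, String(status))  // setter
);
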
@@ -373,20 +407,5 @@ export function resetStoredPermission() {
  }

  export function cleanup() {
-   if (!Voice) {
-     console.log('Voice module not available during cleanup');
-     return;
-   }
-
-   Voice.destroy().then(() => {
-     Voice.removeAllListeners();
-     cleanupVoiceSession();
-   }).catch(error => {
-     console.error('Error in cleanup:', error);
-     // Try one more time
-     if (Voice) {
-       Voice.destroy().catch(e => console.error('Final cleanup attempt failed:', e));
-     }
-   });
- }
-
+   cleanupVoiceSession();
+ }
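
With listeners now bound once per app session and cleanup() reduced to resetting module state, a consumer only needs to initialize on mount and call cleanup() on unmount. An end-to-end sketch follows; the component, import path, and UI wiring are illustrative assumptions, not part of the package:

// Sketch of a consuming screen; only the exported function names come from
// the diff, everything else here is an assumption.
import React, { useEffect, useState } from 'react';
import { Button, Text, View } from 'react-native';
import {
  initVoice,
  startRecording,
  stopRecording,
  cancelRecording,
  cleanup,
} from 'react-native-srschat'; // import path assumed

export default function DictationScreen() {
  const [liveText, setLiveText] = useState('');
  const [finalText, setFinalText] = useState('');

  useEffect(() => {
    // Bind listeners once; repeat calls return early via listenersBound.
    initVoice(
      (text, error) => {
        if (!error && text) setFinalText(text);
        setLiveText('');
      },
      (partial) => setLiveText(partial)
    );
    // cleanup() now only clears the silence timer and schedules state back to IDLE.
    return () => cleanup();
  }, []);

  return (
    <View>
      <Text>{liveText || finalText}</Text>
      <Button title="Start" onPress={() => startRecording()} />
      <Button title="Stop" onPress={() => stopRecording()} />
      <Button title="Cancel" onPress={() => cancelRecording()} />
    </View>
  );
}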