@volley/recognition-client-sdk-node22 0.1.424
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +344 -0
- package/dist/browser.bundled.d.ts +1280 -0
- package/dist/browser.d.ts +10 -0
- package/dist/browser.d.ts.map +1 -0
- package/dist/config-builder.d.ts +134 -0
- package/dist/config-builder.d.ts.map +1 -0
- package/dist/errors.d.ts +41 -0
- package/dist/errors.d.ts.map +1 -0
- package/dist/factory.d.ts +36 -0
- package/dist/factory.d.ts.map +1 -0
- package/dist/index.bundled.d.ts +2572 -0
- package/dist/index.d.ts +16 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +10199 -0
- package/dist/index.js.map +7 -0
- package/dist/recog-client-sdk.browser.d.ts +10 -0
- package/dist/recog-client-sdk.browser.d.ts.map +1 -0
- package/dist/recog-client-sdk.browser.js +5746 -0
- package/dist/recog-client-sdk.browser.js.map +7 -0
- package/dist/recognition-client.d.ts +128 -0
- package/dist/recognition-client.d.ts.map +1 -0
- package/dist/recognition-client.types.d.ts +271 -0
- package/dist/recognition-client.types.d.ts.map +1 -0
- package/dist/simplified-vgf-recognition-client.d.ts +178 -0
- package/dist/simplified-vgf-recognition-client.d.ts.map +1 -0
- package/dist/utils/audio-ring-buffer.d.ts +69 -0
- package/dist/utils/audio-ring-buffer.d.ts.map +1 -0
- package/dist/utils/message-handler.d.ts +45 -0
- package/dist/utils/message-handler.d.ts.map +1 -0
- package/dist/utils/url-builder.d.ts +28 -0
- package/dist/utils/url-builder.d.ts.map +1 -0
- package/dist/vgf-recognition-mapper.d.ts +66 -0
- package/dist/vgf-recognition-mapper.d.ts.map +1 -0
- package/dist/vgf-recognition-state.d.ts +91 -0
- package/dist/vgf-recognition-state.d.ts.map +1 -0
- package/package.json +74 -0
- package/src/browser.ts +24 -0
- package/src/config-builder.spec.ts +265 -0
- package/src/config-builder.ts +240 -0
- package/src/errors.ts +84 -0
- package/src/factory.spec.ts +215 -0
- package/src/factory.ts +47 -0
- package/src/index.ts +127 -0
- package/src/recognition-client.spec.ts +889 -0
- package/src/recognition-client.ts +844 -0
- package/src/recognition-client.types.ts +338 -0
- package/src/simplified-vgf-recognition-client.integration.spec.ts +718 -0
- package/src/simplified-vgf-recognition-client.spec.ts +1525 -0
- package/src/simplified-vgf-recognition-client.ts +524 -0
- package/src/utils/audio-ring-buffer.spec.ts +335 -0
- package/src/utils/audio-ring-buffer.ts +170 -0
- package/src/utils/message-handler.spec.ts +311 -0
- package/src/utils/message-handler.ts +131 -0
- package/src/utils/url-builder.spec.ts +252 -0
- package/src/utils/url-builder.ts +92 -0
- package/src/vgf-recognition-mapper.spec.ts +78 -0
- package/src/vgf-recognition-mapper.ts +232 -0
- package/src/vgf-recognition-state.ts +102 -0
|
@@ -0,0 +1,889 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Unit tests for RealTimeTwoWayWebSocketRecognitionClient
|
|
3
|
+
*/
|
|
4
|
+
|
|
5
|
+
import { RealTimeTwoWayWebSocketRecognitionClient } from './recognition-client';
|
|
6
|
+
import { ClientState } from './recognition-client.types';
|
|
7
|
+
import { RecognitionResultTypeV1 } from '@recog/shared-types';
|
|
8
|
+
import { WebSocket as MockWebSocket } from 'ws';
|
|
9
|
+
|
|
10
|
+
// Mock WebSocket
|
|
11
|
+
jest.mock('ws');
|
|
12
|
+
|
|
13
|
+
describe('RealTimeTwoWayWebSocketRecognitionClient', () => {
|
|
14
|
+
let client: RealTimeTwoWayWebSocketRecognitionClient;
|
|
15
|
+
let mockWs: any;
|
|
16
|
+
|
|
17
|
+
// Per-test fixture: a hand-rolled WebSocket mock plus a client wired to it.
beforeEach(() => {
  // Reset mocks
  jest.clearAllMocks();

  // Create mock WebSocket exposing only the surface the client touches:
  // readyState plus send/close/on/removeAllListeners.
  mockWs = {
    readyState: MockWebSocket.CONNECTING,
    send: jest.fn(),
    close: jest.fn(),
    on: jest.fn(),
    removeAllListeners: jest.fn(),
  };

  // Mock WebSocket constructor — jest.mock('ws') above replaced the class with
  // an auto-mock, so every `new WebSocket(...)` inside the client yields mockWs.
  (MockWebSocket as any).mockImplementation(() => mockWs);

  // Create client under test with a fixed URL and a linear16/16kHz Deepgram config.
  client = new RealTimeTwoWayWebSocketRecognitionClient({
    url: 'ws://test.example.com/recognize',
    asrRequestConfig: {
      provider: 'deepgram',
      language: 'en',
      sampleRate: 16000,
      encoding: 'linear16'
    }
  });
});

afterEach(() => {
  // Clean up
  // NOTE(review): intentionally empty — jest.clearAllMocks() in beforeEach
  // resets mock state; confirm no per-test resources leak across tests.
});
|
|
48
|
+
|
|
49
|
+
describe('Constructor', () => {
|
|
50
|
+
it('should initialize with correct default values', () => {
|
|
51
|
+
expect(client.getState()).toBe(ClientState.INITIAL);
|
|
52
|
+
expect(client.isConnected()).toBe(false);
|
|
53
|
+
expect(client.isBufferOverflowing()).toBe(false);
|
|
54
|
+
expect(client.getAudioUtteranceId()).toBeDefined();
|
|
55
|
+
expect(typeof client.getAudioUtteranceId()).toBe('string');
|
|
56
|
+
});
|
|
57
|
+
|
|
58
|
+
it('should have immutable audioUtteranceId', () => {
|
|
59
|
+
const originalId = client.getAudioUtteranceId();
|
|
60
|
+
// audioUtteranceId should not change
|
|
61
|
+
expect(client.getAudioUtteranceId()).toBe(originalId);
|
|
62
|
+
});
|
|
63
|
+
|
|
64
|
+
it('should expose the WebSocket URL via getUrl()', () => {
|
|
65
|
+
const url = client.getUrl();
|
|
66
|
+
expect(url).toBeDefined();
|
|
67
|
+
expect(typeof url).toBe('string');
|
|
68
|
+
expect(url).toContain('audioUtteranceId=');
|
|
69
|
+
});
|
|
70
|
+
|
|
71
|
+
it('should build URL from stage parameter', () => {
|
|
72
|
+
const stagingClient = new RealTimeTwoWayWebSocketRecognitionClient({
|
|
73
|
+
stage: 'staging',
|
|
74
|
+
asrRequestConfig: {
|
|
75
|
+
provider: 'deepgram',
|
|
76
|
+
language: 'en',
|
|
77
|
+
sampleRate: 16000,
|
|
78
|
+
encoding: 'linear16'
|
|
79
|
+
}
|
|
80
|
+
});
|
|
81
|
+
const url = stagingClient.getUrl();
|
|
82
|
+
expect(url).toBeDefined();
|
|
83
|
+
// URL should be built from stage (exact URL depends on mocked getRecognitionServiceBase)
|
|
84
|
+
expect(url).toContain('/ws/v1/recognize');
|
|
85
|
+
});
|
|
86
|
+
|
|
87
|
+
it('should prioritize url over stage when both provided', () => {
|
|
88
|
+
const explicitUrlClient = new RealTimeTwoWayWebSocketRecognitionClient({
|
|
89
|
+
url: 'ws://custom.example.com/ws/v1/recognize',
|
|
90
|
+
stage: 'staging',
|
|
91
|
+
asrRequestConfig: {
|
|
92
|
+
provider: 'deepgram',
|
|
93
|
+
language: 'en',
|
|
94
|
+
sampleRate: 16000,
|
|
95
|
+
encoding: 'linear16'
|
|
96
|
+
}
|
|
97
|
+
});
|
|
98
|
+
const url = explicitUrlClient.getUrl();
|
|
99
|
+
expect(url).toContain('ws://custom.example.com/ws/v1/recognize');
|
|
100
|
+
expect(url).not.toContain('staging');
|
|
101
|
+
});
|
|
102
|
+
|
|
103
|
+
it('should initialize stats correctly', () => {
|
|
104
|
+
const stats = client.getStats();
|
|
105
|
+
expect(stats.audioBytesSent).toBe(0);
|
|
106
|
+
expect(stats.audioChunksSent).toBe(0);
|
|
107
|
+
expect(stats.audioChunksBuffered).toBe(0);
|
|
108
|
+
expect(stats.bufferOverflowCount).toBe(0);
|
|
109
|
+
expect(stats.currentBufferedChunks).toBe(0);
|
|
110
|
+
});
|
|
111
|
+
});
|
|
112
|
+
|
|
113
|
+
// Drives the client's lifecycle by replaying WebSocket events captured from
// the mock's `on(...)` registrations.
// NOTE(review): suite is describe.skip'd — presumably the mock choreography no
// longer matches the client internals; confirm before re-enabling.
describe.skip('State Management', () => {
  it('should transition from INITIAL to CONNECTING on connect()', async () => {
    expect(client.getState()).toBe(ClientState.INITIAL);

    // Simulate successful connection
    const connectPromise = client.connect();
    expect(client.getState()).toBe(ClientState.CONNECTING);

    // Simulate WebSocket open event
    mockWs.readyState = MockWebSocket.OPEN;
    const openHandler = mockWs.on.mock.calls.find((call: any[]) => call[0] === 'open')[1];
    openHandler();

    await connectPromise;
    expect(client.getState()).toBe(ClientState.CONNECTED);
  });

  it('should transition to READY when server sends ready message', async () => {
    // Connect first
    const connectPromise = client.connect();
    mockWs.readyState = MockWebSocket.OPEN;
    const openHandler = mockWs.on.mock.calls.find((call: any[]) => call[0] === 'open')[1];
    openHandler();
    await connectPromise;

    // Simulate ready message
    const messageHandler = mockWs.on.mock.calls.find((call: any[]) => call[0] === 'message')[1];
    const readyMessage = JSON.stringify({
      v: 1,
      type: 'message',
      data: {
        type: 'ClientControlMessage',
        action: 'ready_for_uploading_recording',
        audioUtteranceId: 'test-utterance-id'
      }
    });
    messageHandler(readyMessage);

    expect(client.getState()).toBe(ClientState.READY);
    expect(client.isConnected()).toBe(true);
  });

  it('should transition to STOPPING when stopRecording() is called', async () => {
    // Setup: Connect and become ready
    await setupConnectedClient();

    // Call stopRecording
    const stopPromise = client.stopRecording();
    expect(client.getState()).toBe(ClientState.STOPPING);

    // Simulate final transcript
    const messageHandler = mockWs.on.mock.calls.find((call: any[]) => call[0] === 'message')[1];
    const finalMessage = JSON.stringify({
      v: 1,
      type: 'message',
      data: {
        type: 'Transcription',
        finalTranscript: 'test',
        is_finished: true
      }
    });
    messageHandler(finalMessage);

    await stopPromise;
    expect(client.getState()).toBe(ClientState.STOPPED);
  });

  it('should transition to FAILED on connection error', async () => {
    const connectPromise = client.connect();

    // Simulate error
    const errorHandler = mockWs.on.mock.calls.find((call: any[]) => call[0] === 'error')[1];
    errorHandler(new Error('Connection failed'));

    try {
      await connectPromise;
    } catch (e) {
      // Expected: connect() rejects after the error event.
    }

    expect(client.getState()).toBe(ClientState.FAILED);
  });
});
|
|
196
|
+
|
|
197
|
+
// Verifies connect() is idempotent: duplicate calls share one promise and an
// already-connected client does not open a second socket.
// NOTE(review): suite is describe.skip'd — confirm reason before re-enabling.
describe.skip('Connection Handling', () => {
  it('should handle duplicate connect() calls', async () => {
    // Call connect twice
    const promise1 = client.connect();
    const promise2 = client.connect();

    // Should be the same promise
    expect(promise1).toBe(promise2);

    // Simulate successful connection
    mockWs.readyState = MockWebSocket.OPEN;
    const openHandler = mockWs.on.mock.calls.find((call: any[]) => call[0] === 'open')[1];
    openHandler();

    await Promise.all([promise1, promise2]);
    expect(client.getState()).toBe(ClientState.CONNECTED);
  });

  it('should not reconnect if already connected', async () => {
    // First connection
    await setupConnectedClient();
    const firstWs = mockWs;

    // Try to connect again
    await client.connect();

    // Should not create new WebSocket
    expect(MockWebSocket).toHaveBeenCalledTimes(1);
    expect(client.isConnected()).toBe(true);
  });
});
|
|
228
|
+
|
|
229
|
+
// Covers sendAudio() buffering semantics: buffer while not ready, send when
// ready, flush on becoming ready, and overflow detection.
// NOTE(review): suite is describe.skip'd — confirm reason before re-enabling.
describe.skip('Audio Handling', () => {
  it('should buffer audio when not ready', () => {
    const audioData = Buffer.from([1, 2, 3, 4]);
    client.sendAudio(audioData);

    // Nothing sent yet; the chunk sits in the ring buffer.
    const stats = client.getStats();
    expect(stats.audioBytesSent).toBe(0);
    expect(stats.audioChunksBuffered).toBe(1);
    expect(stats.currentBufferedChunks).toBe(1);
  });

  it('should send audio immediately when ready', async () => {
    await setupReadyClient();

    const audioData = Buffer.from([1, 2, 3, 4]);
    client.sendAudio(audioData);

    // Should have sent the audio
    expect(mockWs.send).toHaveBeenCalled();
    const stats = client.getStats();
    expect(stats.audioBytesSent).toBe(4);
    expect(stats.audioChunksSent).toBe(1);
  });

  it('should flush buffered audio when becoming ready', async () => {
    // Buffer some audio while disconnected
    const audioData1 = Buffer.from([1, 2, 3, 4]);
    const audioData2 = Buffer.from([5, 6, 7, 8]);
    client.sendAudio(audioData1);
    client.sendAudio(audioData2);

    // Verify buffered
    let stats = client.getStats();
    expect(stats.currentBufferedChunks).toBe(2);
    expect(stats.audioBytesSent).toBe(0);

    // Connect and become ready
    await setupReadyClient();

    // Should have flushed the buffer
    stats = client.getStats();
    expect(stats.audioBytesSent).toBe(8);
    expect(stats.audioChunksSent).toBe(2);
    expect(stats.currentBufferedChunks).toBe(0);
  });

  it('should detect buffer overflow', () => {
    // Fill buffer to capacity (simulate overflow)
    const chunkSize = 1024;
    const maxChunks = 6000; // Default buffer size
    // NOTE(review): 6000 presumably matches the default ring-buffer capacity
    // in audio-ring-buffer.ts — confirm it stays in sync with that constant.

    for (let i = 0; i <= maxChunks; i++) {
      client.sendAudio(Buffer.alloc(chunkSize));
    }

    expect(client.isBufferOverflowing()).toBe(true);
    const stats = client.getStats();
    expect(stats.bufferOverflowCount).toBeGreaterThan(0);
  });
});
|
|
289
|
+
|
|
290
|
+
// Checks the read-only accessors: isConnected() across the lifecycle and
// getStats() accumulation after sends.
// NOTE(review): suite is describe.skip'd — confirm reason before re-enabling.
describe.skip('Helper Methods', () => {
  it('should report correct connection status', async () => {
    expect(client.isConnected()).toBe(false);

    await setupConnectedClient();
    expect(client.isConnected()).toBe(true);

    await setupReadyClient();
    expect(client.isConnected()).toBe(true);

    // Disconnect
    const closeHandler = mockWs.on.mock.calls.find((call: any[]) => call[0] === 'close')[1];
    closeHandler(1000, 'Normal closure');
    expect(client.isConnected()).toBe(false);
  });

  it('should track statistics correctly', async () => {
    await setupReadyClient();

    // Send some audio
    client.sendAudio(Buffer.alloc(100));
    client.sendAudio(Buffer.alloc(200));
    client.sendAudio(Buffer.alloc(300));

    const stats = client.getStats();
    expect(stats.audioBytesSent).toBe(600);
    expect(stats.audioChunksSent).toBe(3);
    expect(stats.audioChunksBuffered).toBe(3);
  });
});
|
|
320
|
+
|
|
321
|
+
// Ensures no leaks: ring buffer cleared on disconnect and socket listeners
// removed on stopRecording().
// NOTE(review): suite is describe.skip'd — confirm reason before re-enabling.
describe.skip('Memory Management', () => {
  it('should clear ring buffer on disconnect', async () => {
    // Buffer some audio
    client.sendAudio(Buffer.alloc(100));
    client.sendAudio(Buffer.alloc(200));

    let stats = client.getStats();
    expect(stats.currentBufferedChunks).toBe(2);

    // Connect then disconnect
    await setupConnectedClient();
    const closeHandler = mockWs.on.mock.calls.find((call: any[]) => call[0] === 'close')[1];
    closeHandler(1000, 'Normal closure');

    // Buffer should be cleared
    stats = client.getStats();
    expect(stats.currentBufferedChunks).toBe(0);
  });

  it('should cleanup WebSocket listeners on close', async () => {
    await setupConnectedClient();

    await client.stopRecording();

    expect(mockWs.removeAllListeners).toHaveBeenCalled();
    expect(mockWs.close).toHaveBeenCalled();
  });
});
|
|
349
|
+
|
|
350
|
+
// Replays server-to-client JSON envelopes ({v, type, data}) through the mock's
// 'message' handler and asserts the matching callback fires.
// NOTE(review): suite is describe.skip'd — confirm reason before re-enabling.
// NOTE(review): the clients built here omit asrRequestConfig, unlike the
// beforeEach fixture — presumably the options type allows it; confirm.
describe.skip('Message Handling', () => {
  it('should handle transcription messages', async () => {
    const onTranscript = jest.fn();
    client = new RealTimeTwoWayWebSocketRecognitionClient({
      url: 'ws://test.example.com/recognize',
      onTranscript,
    });

    await setupConnectedClient();

    const messageHandler = mockWs.on.mock.calls.find((call: any[]) => call[0] === 'message')[1];
    const transcriptMessage = JSON.stringify({
      v: 1,
      type: 'message',
      data: {
        type: 'Transcription',
        finalTranscript: 'Hello world',
        finalTranscriptConfidence: 0.95,
        is_finished: false
      }
    });

    messageHandler(transcriptMessage);

    expect(onTranscript).toHaveBeenCalledWith(expect.objectContaining({
      finalTranscript: 'Hello world',
      finalTranscriptConfidence: 0.95,
      is_finished: false
    }));
  });

  it('should handle function call messages', async () => {
    const onFunctionCall = jest.fn();
    client = new RealTimeTwoWayWebSocketRecognitionClient({
      url: 'ws://test.example.com/recognize',
      onFunctionCall,
    });

    await setupConnectedClient();

    const messageHandler = mockWs.on.mock.calls.find((call: any[]) => call[0] === 'message')[1];
    const functionCallMessage = JSON.stringify({
      v: 1,
      type: 'message',
      data: {
        type: 'FunctionCall',
        audioUtteranceId: 'test-id',
        functionName: 'playMusic',
        functionArgJson: '{"song": "Bohemian Rhapsody"}'
      }
    });

    messageHandler(functionCallMessage);

    expect(onFunctionCall).toHaveBeenCalledWith(expect.objectContaining({
      type: 'FunctionCall',
      audioUtteranceId: 'test-id',
      functionName: 'playMusic',
      functionArgJson: '{"song": "Bohemian Rhapsody"}'
    }));
  });

  it('should handle metadata messages', async () => {
    const onMetadata = jest.fn();
    client = new RealTimeTwoWayWebSocketRecognitionClient({
      url: 'ws://test.example.com/recognize',
      onMetadata,
    });

    await setupConnectedClient();

    const messageHandler = mockWs.on.mock.calls.find((call: any[]) => call[0] === 'message')[1];
    const metadataMessage = JSON.stringify({
      v: 1,
      type: 'message',
      data: {
        type: 'Metadata',
        audioUtteranceId: 'test-id',
        duration: 1000
      }
    });

    messageHandler(metadataMessage);

    expect(onMetadata).toHaveBeenCalledWith(expect.objectContaining({
      audioUtteranceId: 'test-id',
      duration: 1000
    }));
  });

  it('should handle error messages', async () => {
    const onError = jest.fn();
    client = new RealTimeTwoWayWebSocketRecognitionClient({
      url: 'ws://test.example.com/recognize',
      onError,
    });

    await setupConnectedClient();

    const messageHandler = mockWs.on.mock.calls.find((call: any[]) => call[0] === 'message')[1];
    const errorMessage = JSON.stringify({
      v: 1,
      type: 'message',
      data: {
        type: 'Error',
        message: 'Something went wrong'
      }
    });

    messageHandler(errorMessage);

    // Server-side Error payloads surface to the caller as a JS Error.
    expect(onError).toHaveBeenCalledWith(expect.any(Error));
    expect(onError.mock.calls[0][0].message).toContain('Something went wrong');
  });

  it('should handle primitive message data without crashing', async () => {
    const onError = jest.fn();
    client = new RealTimeTwoWayWebSocketRecognitionClient({
      url: 'ws://test.example.com/recognize',
      onError,
    });

    await setupConnectedClient();

    const messageHandler = mockWs.on.mock.calls.find((call: any[]) => call[0] === 'message')[1];

    // Test with various primitive values
    const primitiveMessages = [
      JSON.stringify({ v: 1, type: 'test', data: 'string' }),
      JSON.stringify({ v: 1, type: 'test', data: 123 }),
      JSON.stringify({ v: 1, type: 'test', data: true }),
      JSON.stringify({ v: 1, type: 'test', data: null }),
    ];

    // Should not throw
    expect(() => {
      primitiveMessages.forEach(msg => messageHandler(msg));
    }).not.toThrow();

    // Should log errors for primitives
    expect(onError).toHaveBeenCalled();
  });
});
|
|
493
|
+
|
|
494
|
+
// Exercises Blob inputs to sendAudio(): conversion to ArrayBuffer, buffering
// stats, and both conversion paths (blob.arrayBuffer() and FileReader).
// NOTE(review): the fixed setTimeout waits below are timing-based and could be
// flaky under load — consider awaiting a promise flush instead; confirm.
describe('Blob Audio Handling', () => {
  it('should accept Blob as audio input', async () => {
    const audioData = new Uint8Array([1, 2, 3, 4]);
    const blob = new Blob([audioData], { type: 'audio/raw' });

    // Should not throw
    expect(() => client.sendAudio(blob)).not.toThrow();
  });

  it('should convert Blob to ArrayBuffer before buffering', async () => {
    const audioData = new Uint8Array([1, 2, 3, 4]);
    const blob = new Blob([audioData], { type: 'audio/raw' });

    client.sendAudio(blob);

    // Wait for async conversion
    await new Promise(resolve => setTimeout(resolve, 100));

    const stats = client.getStats();
    // Should have buffered the converted data
    expect(stats.currentBufferedChunks).toBeGreaterThan(0);
  });

  it('should handle empty Blob', async () => {
    const blob = new Blob([], { type: 'audio/raw' });

    client.sendAudio(blob);

    // Wait for async conversion
    await new Promise(resolve => setTimeout(resolve, 100));

    const stats = client.getStats();
    // Empty blob should not be buffered
    expect(stats.currentBufferedChunks).toBe(0);
  });

  it('should handle large Blob', async () => {
    const largeData = new Uint8Array(1024 * 1024); // 1MB
    for (let i = 0; i < largeData.length; i++) {
      largeData[i] = i % 256;
    }
    const blob = new Blob([largeData], { type: 'audio/raw' });

    client.sendAudio(blob);

    // Wait for async conversion
    await new Promise(resolve => setTimeout(resolve, 100));

    // One Blob, however large, occupies a single buffer slot.
    const stats = client.getStats();
    expect(stats.currentBufferedChunks).toBe(1);
  });

  it('should handle multiple Blobs in sequence', async () => {
    const blob1 = new Blob([new Uint8Array([1, 2, 3, 4])], { type: 'audio/raw' });
    const blob2 = new Blob([new Uint8Array([5, 6, 7, 8])], { type: 'audio/raw' });
    const blob3 = new Blob([new Uint8Array([9, 10, 11, 12])], { type: 'audio/raw' });

    client.sendAudio(blob1);
    client.sendAudio(blob2);
    client.sendAudio(blob3);

    // Wait for all async conversions
    await new Promise(resolve => setTimeout(resolve, 200));

    const stats = client.getStats();
    expect(stats.currentBufferedChunks).toBe(3);
  });

  it('should handle mixed Blob and ArrayBuffer inputs', async () => {
    const blob = new Blob([new Uint8Array([1, 2, 3, 4])], { type: 'audio/raw' });
    const arrayBuffer = new ArrayBuffer(4);
    const view = new Uint8Array(arrayBuffer);
    view.set([5, 6, 7, 8]);

    client.sendAudio(blob);
    client.sendAudio(arrayBuffer);

    // Wait for Blob conversion
    await new Promise(resolve => setTimeout(resolve, 100));

    const stats = client.getStats();
    expect(stats.currentBufferedChunks).toBe(2);
  });

  it('should log error if Blob conversion fails', async () => {
    const mockLogger = jest.fn();
    const testClient = new RealTimeTwoWayWebSocketRecognitionClient({
      url: 'ws://test.example.com/recognize',
      logger: mockLogger
    });

    // Create a real Blob but spy on arrayBuffer to make it fail
    const audioData = new Uint8Array([1, 2, 3, 4]);
    const badBlob = new Blob([audioData], { type: 'audio/raw' });

    // Mock arrayBuffer to reject
    jest.spyOn(badBlob, 'arrayBuffer').mockRejectedValue(new Error('Conversion failed'));

    testClient.sendAudio(badBlob);

    // Wait for error handling
    await new Promise(resolve => setTimeout(resolve, 100));

    // Should have logged an error (logger is called as logger(level, ...)).
    const errorCalls = mockLogger.mock.calls.filter(call => call[0] === 'error');
    expect(errorCalls.length).toBeGreaterThan(0);
  });

  // Note: The blobToArrayBuffer() function has dual-path support:
  // - Modern browsers: Uses blob.arrayBuffer() [Chrome 76+, Safari 14+]
  // - Older Smart TVs: Falls back to FileReader [Tizen 2018-2019, webOS 3.0-4.x]

  it('should use blob.arrayBuffer() when available (modern path)', async () => {
    const audioData = new Uint8Array([1, 2, 3, 4]);
    const blob = new Blob([audioData], { type: 'audio/raw' });

    // Spy on blob.arrayBuffer to verify it's called
    const arrayBufferSpy = jest.spyOn(blob, 'arrayBuffer');

    client.sendAudio(blob);

    // Wait for async conversion
    await new Promise(resolve => setTimeout(resolve, 100));

    // Should have used modern blob.arrayBuffer()
    expect(arrayBufferSpy).toHaveBeenCalled();

    // Should have buffered successfully
    const stats = client.getStats();
    expect(stats.currentBufferedChunks).toBe(1);
  });

  it('should use FileReader fallback when blob.arrayBuffer not available (Smart TV path)', async () => {
    const audioData = new Uint8Array([1, 2, 3, 4]);

    // Create a real Blob
    const blob = new Blob([audioData], { type: 'audio/raw' });

    // Mock FileReader BEFORE removing arrayBuffer
    const mockReadAsArrayBuffer = jest.fn();
    const originalFileReader = (global as any).FileReader;

    (global as any).FileReader = jest.fn().mockImplementation(() => ({
      readAsArrayBuffer: mockReadAsArrayBuffer,
      onload: null,
      onerror: null,
      result: audioData.buffer
    }));

    // Trigger FileReader path by simulating onload after a delay
    mockReadAsArrayBuffer.mockImplementation(function(this: any) {
      setTimeout(() => {
        if (this.onload) {
          this.result = audioData.buffer;
          this.onload();
        }
      }, 10);
    });

    // Remove arrayBuffer method to simulate old Smart TV (must be done after blob creation)
    Object.defineProperty(blob, 'arrayBuffer', {
      value: undefined,
      writable: true,
      configurable: true
    });

    client.sendAudio(blob);

    // Wait for FileReader async conversion
    await new Promise(resolve => setTimeout(resolve, 150));

    // Should have used FileReader
    expect((global as any).FileReader).toHaveBeenCalled();
    expect(mockReadAsArrayBuffer).toHaveBeenCalledWith(blob);

    // Should have buffered successfully
    const stats = client.getStats();
    expect(stats.currentBufferedChunks).toBe(1);

    // Cleanup: restore the real FileReader so later tests are unaffected.
    (global as any).FileReader = originalFileReader;
  });
});
|
|
677
|
+
|
|
678
|
+
describe('Debug Logging', () => {
|
|
679
|
+
it('should not log debug messages when debug logging is disabled (default)', () => {
|
|
680
|
+
const mockLogger = jest.fn();
|
|
681
|
+
const testClient = new RealTimeTwoWayWebSocketRecognitionClient({
|
|
682
|
+
url: 'ws://test.example.com/recognize',
|
|
683
|
+
asrRequestConfig: {
|
|
684
|
+
provider: 'deepgram',
|
|
685
|
+
language: 'en',
|
|
686
|
+
sampleRate: 16000,
|
|
687
|
+
encoding: 'linear16'
|
|
688
|
+
},
|
|
689
|
+
logger: mockLogger
|
|
690
|
+
});
|
|
691
|
+
|
|
692
|
+
// Trigger some actions that would normally log debug messages
|
|
693
|
+
expect(testClient.getState()).toBe(ClientState.INITIAL);
|
|
694
|
+
|
|
695
|
+
// Debug logs should not be called
|
|
696
|
+
const debugCalls = mockLogger.mock.calls.filter(call => call[0] === 'debug');
|
|
697
|
+
expect(debugCalls.length).toBe(0);
|
|
698
|
+
|
|
699
|
+
// But other log levels should work
|
|
700
|
+
const nonDebugCalls = mockLogger.mock.calls.filter(call => call[0] !== 'debug');
|
|
701
|
+
// May or may not have non-debug logs, just checking we can track them
|
|
702
|
+
expect(nonDebugCalls.length).toBeGreaterThanOrEqual(0);
|
|
703
|
+
});
|
|
704
|
+
|
|
705
|
+
it('should log debug messages when enableDebugLog is true in debugCommand', () => {
|
|
706
|
+
const mockLogger = jest.fn();
|
|
707
|
+
const testClient = new RealTimeTwoWayWebSocketRecognitionClient({
|
|
708
|
+
url: 'ws://test.example.com/recognize',
|
|
709
|
+
asrRequestConfig: {
|
|
710
|
+
provider: 'deepgram',
|
|
711
|
+
language: 'en',
|
|
712
|
+
sampleRate: 16000,
|
|
713
|
+
encoding: 'linear16',
|
|
714
|
+
debugCommand: {
|
|
715
|
+
enableDebugLog: true
|
|
716
|
+
}
|
|
717
|
+
} as any, // Using 'as any' to bypass type checking for the new field
|
|
718
|
+
logger: mockLogger
|
|
719
|
+
});
|
|
720
|
+
|
|
721
|
+
// Note: Debug logging is enabled in onConnected() when ASR request is sent
|
|
722
|
+
// So we need to test after connection, but for now we verify the config is accepted
|
|
723
|
+
expect(testClient.getAudioUtteranceId()).toBeDefined();
|
|
724
|
+
});
|
|
725
|
+
|
|
726
|
+
it('should respect debugCommand.enableDebugLog flag', () => {
|
|
727
|
+
const mockLogger = jest.fn();
|
|
728
|
+
|
|
729
|
+
// Client with debug logging explicitly disabled
|
|
730
|
+
const clientNoDebug = new RealTimeTwoWayWebSocketRecognitionClient({
|
|
731
|
+
url: 'ws://test.example.com/recognize',
|
|
732
|
+
asrRequestConfig: {
|
|
733
|
+
provider: 'deepgram',
|
|
734
|
+
language: 'en',
|
|
735
|
+
sampleRate: 16000,
|
|
736
|
+
encoding: 'linear16',
|
|
737
|
+
debugCommand: {
|
|
738
|
+
enableDebugLog: false
|
|
739
|
+
}
|
|
740
|
+
} as any,
|
|
741
|
+
logger: mockLogger
|
|
742
|
+
});
|
|
743
|
+
|
|
744
|
+
expect(clientNoDebug.getAudioUtteranceId()).toBeDefined();
|
|
745
|
+
|
|
746
|
+
// Debug logs should not be called
|
|
747
|
+
const debugCalls = mockLogger.mock.calls.filter(call => call[0] === 'debug');
|
|
748
|
+
expect(debugCalls.length).toBe(0);
|
|
749
|
+
});
|
|
750
|
+
});
|
|
751
|
+
|
|
752
|
+
// Helper functions
|
|
753
|
+
async function setupConnectedClient() {
|
|
754
|
+
const connectPromise = client.connect();
|
|
755
|
+
mockWs.readyState = MockWebSocket.OPEN;
|
|
756
|
+
const openCall = mockWs.on.mock.calls.find((call: any[]) => call[0] === 'open');
|
|
757
|
+
if (!openCall) {
|
|
758
|
+
throw new Error('No "open" event handler registered on mockWs');
|
|
759
|
+
}
|
|
760
|
+
const openHandler = openCall[1];
|
|
761
|
+
openHandler();
|
|
762
|
+
await connectPromise;
|
|
763
|
+
}
|
|
764
|
+
|
|
765
|
+
async function setupReadyClient() {
|
|
766
|
+
await setupConnectedClient();
|
|
767
|
+
const messageHandler = mockWs.on.mock.calls.find((call: any[]) => call[0] === 'message')[1];
|
|
768
|
+
const readyMessage = JSON.stringify({
|
|
769
|
+
v: 1,
|
|
770
|
+
type: 'message',
|
|
771
|
+
data: {
|
|
772
|
+
type: 'ClientControlMessage',
|
|
773
|
+
action: 'ready_for_uploading_recording',
|
|
774
|
+
audioUtteranceId: 'test-utterance-id'
|
|
775
|
+
}
|
|
776
|
+
});
|
|
777
|
+
messageHandler(readyMessage);
|
|
778
|
+
}
|
|
779
|
+
|
|
780
|
+
// Suite for stopAbnormally(): the immediate, client-side-only teardown path.
// NOTE(review): most cases here are it.skip — presumably pending until the
// mock WebSocket wiring supports the full READY flow; confirm before relying
// on this suite for coverage of the close/cleanup behavior.
describe('stopAbnormally', () => {
  beforeEach(() => {
    // Create fresh mock WebSocket
    mockWs = {
      readyState: MockWebSocket.CONNECTING,
      send: jest.fn(),
      close: jest.fn(),
      on: jest.fn().mockReturnThis(),
      removeAllListeners: jest.fn(),
    };

    // Mock WebSocket constructor
    (MockWebSocket as any).mockImplementation(() => mockWs);

    // Create fresh client
    client = new RealTimeTwoWayWebSocketRecognitionClient({
      url: 'ws://localhost:3000',
      asrRequestConfig: {
        provider: 'deepgram',
        language: 'en',
        sampleRate: 16000,
        encoding: 'linear16'
      },
      onTranscript: jest.fn(),
      onError: jest.fn(),
      onConnected: jest.fn(),
      onDisconnected: jest.fn()
    });
  });

  it.skip('should immediately close WebSocket connection', async () => {
    await setupReadyClient();
    expect(client.getState()).toBe(ClientState.READY);

    client.stopAbnormally();

    // Expects the normal-closure code 1000 with an explanatory reason.
    expect(mockWs.close).toHaveBeenCalledWith(1000, 'Client abnormal stop');
  });

  it.skip('should update state to STOPPED', async () => {
    await setupReadyClient();

    client.stopAbnormally();

    expect(client.getState()).toBe(ClientState.STOPPED);
  });

  it('should work from any state', () => {
    // Test from INITIAL state
    expect(client.getState()).toBe(ClientState.INITIAL);
    client.stopAbnormally();
    expect(client.getState()).toBe(ClientState.STOPPED);
  });

  it.skip('should clean up resources', async () => {
    await setupReadyClient();

    // Send some audio to populate buffers
    client.sendAudio(new ArrayBuffer(1000));

    // Verify audio was sent
    const statsBefore = client.getStats();
    expect(statsBefore.audioBytesSent).toBeGreaterThan(0);

    client.stopAbnormally();

    // Cleanup resets stats
    const statsAfter = client.getStats();
    expect(statsAfter.audioBytesSent).toBe(0);
    expect(statsAfter.audioChunksSent).toBe(0);
  });

  it.skip('should not send stop signal to server (immediate disconnect)', async () => {
    await setupReadyClient();
    jest.clearAllMocks(); // Clear connection setup messages

    client.stopAbnormally();

    // Should NOT send stop recording signal (unlike stopRecording)
    // Only closes the WebSocket
    expect(mockWs.send).not.toHaveBeenCalled();
    expect(mockWs.close).toHaveBeenCalled();
  });

  it.skip('should differ from stopRecording behavior', async () => {
    // stopAbnormally does NOT send stop signal (unlike stopRecording which sends STOP_RECORDING signal)
    // This is verified by the previous test "should not send stop signal to server"
    // This test verifies stopAbnormally doesn't wait for server response

    await setupReadyClient();

    // Call stopAbnormally
    client.stopAbnormally();

    // State should immediately be STOPPED (not STOPPING)
    expect(client.getState()).toBe(ClientState.STOPPED);

    // This is different from stopRecording which would be STOPPING and waiting for server
  });

  it('should be idempotent - safe to call multiple times', () => {
    client.stopAbnormally();
    expect(client.getState()).toBe(ClientState.STOPPED);

    // Call again - should not throw
    expect(() => client.stopAbnormally()).not.toThrow();
    expect(client.getState()).toBe(ClientState.STOPPED);
  });
});
|
|
889
|
+
});
|