valenceai 0.5.1 → 1.0.1
This diff represents the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their respective public registries.
- package/CHANGELOG.md +42 -0
- package/README.md +357 -177
- package/package.json +15 -6
- package/src/asyncAudio.js +11 -6
- package/src/config.js +25 -14
- package/src/rateLimit.js +77 -0
- package/src/streaming.js +193 -0
- package/src/utils/logger.js +3 -3
- package/src/valenceClient.js +173 -68
- package/tests/asyncAudio.test.js +128 -71
- package/tests/client.test.js +10 -25
- package/tests/config.test.js +21 -21
- package/tests/e2e.asyncWorkflow.test.js +343 -0
- package/tests/e2e.streaming.test.js +420 -0
- package/tests/logger.test.js +3 -0
- package/tests/rateLimit.test.js +137 -0
- package/tests/setup.js +5 -4
- package/tests/streaming.test.js +187 -0
- package/tests/valenceClient.test.js +50 -5
package/tests/e2e.asyncWorkflow.test.js
@@ -0,0 +1,343 @@
+/**
+ * End-to-End Tests for Async API Workflow
+ *
+ * These tests validate the complete async API workflow as documented in the README:
+ * 1. Upload Phase - File upload to S3
+ * 2. Background Processing - Server-side processing
+ * 3. Results Retrieval - Polling until completion
+ *
+ * Note: These tests make REAL API calls to the demo server with REAL audio files.
+ * Set VALENCE_E2E_TEST=true and VALENCE_API_KEY to run these tests.
+ */
+
+import { ValenceClient } from '../src/index.js';
+import { describe, test, expect, beforeAll } from '@jest/globals';
+import fs from 'fs';
+import path from 'path';
+import axios from 'axios';
+
+// Skip E2E tests unless explicitly enabled
+const describeE2E = process.env.VALENCE_E2E_TEST === 'true' ? describe : describe.skip;
+
+// Path to real test audio files
+const TEST_AUDIO_DIR = '/Users/julianolarte/code/valenceai/valence-backend-api/tests/fixtures';
+const TEST_SHORT_AUDIO = path.join(TEST_AUDIO_DIR, 'test_file.wav');
+const TEST_LONG_AUDIO = path.join(TEST_AUDIO_DIR, 'long_file_sample.wav');
+
+describeE2E('Async API E2E Tests', () => {
+  let client;
+  let apiKey;
+  let baseUrl;
+
+  beforeAll(() => {
+    // Check for API key
+    apiKey = process.env.VALENCE_API_KEY;
+    if (!apiKey) {
+      throw new Error('VALENCE_API_KEY not set');
+    }
+
+    // Use demo base URL by default
+    baseUrl = process.env.VALENCE_API_BASE_URL || 'https://demo.getvalenceai.com';
+
+    client = new ValenceClient({
+      apiKey,
+      baseUrl
+    });
+
+    // Verify test files exist
+    if (!fs.existsSync(TEST_SHORT_AUDIO)) {
+      throw new Error(`Test audio file not found: ${TEST_SHORT_AUDIO}`);
+    }
+    if (!fs.existsSync(TEST_LONG_AUDIO)) {
+      throw new Error(`Test audio file not found: ${TEST_LONG_AUDIO}`);
+    }
+  });
+
+  test('Complete async workflow with real audio file', async () => {
+    console.log('\n' + '='.repeat(70));
+    console.log('Testing Complete Async Workflow');
+    console.log('='.repeat(70));
+
+    // STEP 1: Upload Phase - File is uploaded to S3
+    console.log('\nSTEP 1: Upload Phase - File upload to S3');
+    console.log('-'.repeat(70));
+    console.log(`Using test file: ${path.basename(TEST_LONG_AUDIO)}`);
+
+    const startUpload = Date.now();
+    const requestId = await client.asynch.upload(TEST_LONG_AUDIO);
+    const uploadTime = (Date.now() - startUpload) / 1000;
+
+    console.log('✓ Upload to S3 complete!');
+    console.log(` Request ID: ${requestId}`);
+    console.log(` Upload duration: ${uploadTime.toFixed(2)} seconds`);
+    console.log(' Note: File is uploaded but NOT processed yet');
+
+    // Verify request_id is returned (tracking ID, NOT completion)
+    expect(requestId).toBeDefined();
+    expect(typeof requestId).toBe('string');
+    expect(requestId.length).toBeGreaterThan(0);
+
+    // STEP 2: Background Processing
+    console.log('\nSTEP 2: Background Processing Phase');
+    console.log('-'.repeat(70));
+    console.log(' Server automatically:');
+    console.log(' - Detects uploaded file (checks every 10 seconds)');
+    console.log(' - Downloads audio from S3');
+    console.log(' - Splits into 5-second segments');
+    console.log(' - Runs ML model on each chunk');
+    console.log(' - Stores results in database');
+    console.log(' Status progression: initiated → upload_completed → processing → completed');
+
+    // STEP 3: Poll for Results
+    console.log('\nSTEP 3: Results Retrieval Phase - Polling for completion');
+    console.log('-'.repeat(70));
+
+    const startPoll = Date.now();
+    const result = await client.asynch.emotions(
+      requestId,
+      50,    // maxTries
+      10000  // intervalMs (10 seconds)
+    );
+    const pollTime = (Date.now() - startPoll) / 1000;
+
+    console.log('✓ Processing complete!');
+    console.log(` Status: ${result.status}`);
+    console.log(` Processing duration: ${pollTime.toFixed(2)} seconds`);
+    console.log(` Total emotion data points: ${result.emotions.length}`);
+
+    // Verify results
+    expect(result.status).toBe('completed');
+    expect(result.emotions).toBeDefined();
+    expect(result.emotions.length).toBeGreaterThan(0);
+
+    // Verify emotion data structure
+    const firstEmotion = result.emotions[0];
+    expect(firstEmotion).toHaveProperty('timestamp');
+    expect(firstEmotion).toHaveProperty('start_time');
+    expect(firstEmotion).toHaveProperty('end_time');
+    expect(firstEmotion).toHaveProperty('emotion');
+    expect(firstEmotion).toHaveProperty('confidence');
+
+    console.log('\n Sample emotion data:');
+    result.emotions.slice(0, 3).forEach((emotion, i) => {
+      console.log(` [${i+1}] ${emotion.start_time.toFixed(1)}s-${emotion.end_time.toFixed(1)}s: ` +
+        `${emotion.emotion} (${(emotion.confidence * 100).toFixed(1)}%)`);
+    });
+
+    console.log('\n' + '='.repeat(70));
+    console.log('✅ COMPLETE WORKFLOW TEST PASSED');
+    console.log(` Total time: ${(uploadTime + pollTime).toFixed(2)} seconds`);
+    console.log('='.repeat(70));
+  }, 600000); // 10 minute timeout for E2E test
+
+  test('Timeline analysis methods', async () => {
+    console.log('\n' + '='.repeat(70));
+    console.log('Testing Timeline Analysis Methods');
+    console.log('='.repeat(70));
+
+    // Upload and wait for processing
+    console.log(`\nUploading: ${path.basename(TEST_LONG_AUDIO)}`);
+    const requestId = await client.asynch.upload(TEST_LONG_AUDIO);
+    console.log(`Request ID: ${requestId}`);
+
+    console.log('\nWaiting for processing...');
+    const result = await client.asynch.emotions(requestId, 50, 10000);
+    expect(result.status).toBe('completed');
+    console.log('✓ Processing complete');
+
+    // Test getTimeline()
+    console.log('\n1. Testing getTimeline()');
+    console.log('-'.repeat(70));
+    const timeline = await client.asynch.getTimeline(requestId);
+
+    expect(timeline).toBeDefined();
+    expect(timeline.length).toBeGreaterThan(0);
+    expect(timeline.every(point => 'emotion' in point)).toBe(true);
+
+    console.log(`✓ Timeline retrieved: ${timeline.length} data points`);
+    console.log(` First emotion: ${timeline[0].emotion}`);
+    console.log(` Last emotion: ${timeline[timeline.length - 1].emotion}`);
+
+    // Test getEmotionAtTime()
+    console.log('\n2. Testing getEmotionAtTime()');
+    console.log('-'.repeat(70));
+    const midPoint = timeline[Math.floor(timeline.length / 2)];
+    const midTime = midPoint.timestamp;
+
+    const emotionAtTime = await client.asynch.getEmotionAtTime(requestId, midTime);
+
+    expect(emotionAtTime).toBeDefined();
+    expect(emotionAtTime).toHaveProperty('emotion');
+
+    console.log(`✓ Emotion at ${midTime.toFixed(1)}s: ${emotionAtTime.emotion}`);
+    console.log(` Confidence: ${emotionAtTime.confidence ? (emotionAtTime.confidence * 100).toFixed(1) + '%' : 'N/A'}`);
+
+    // Test getDominantEmotion()
+    console.log('\n3. Testing getDominantEmotion()');
+    console.log('-'.repeat(70));
+    const dominant = await client.asynch.getDominantEmotion(requestId);
+
+    expect(dominant).toBeDefined();
+    expect(typeof dominant).toBe('string');
+
+    // Count emotions to verify
+    const emotionCounts = {};
+    timeline.forEach(point => {
+      const emotion = point.emotion;
+      emotionCounts[emotion] = (emotionCounts[emotion] || 0) + 1;
+    });
+
+    console.log(`✓ Dominant emotion: ${dominant}`);
+    console.log(' Emotion distribution:');
+    Object.entries(emotionCounts)
+      .sort(([, a], [, b]) => b - a)
+      .forEach(([emotion, count]) => {
+        const percentage = (count / timeline.length) * 100;
+        console.log(` ${emotion}: ${count} (${percentage.toFixed(1)}%)`);
+      });
+
+    console.log('\n' + '='.repeat(70));
+    console.log('✅ TIMELINE METHODS TEST PASSED');
+    console.log('='.repeat(70));
+  }, 600000);
+
+  test('Status progression during processing', async () => {
+    console.log('\n' + '='.repeat(70));
+    console.log('Testing Status Progression');
+    console.log('='.repeat(70));
+
+    // Upload
+    console.log(`\nUploading: ${path.basename(TEST_LONG_AUDIO)}`);
+    const requestId = await client.asynch.upload(TEST_LONG_AUDIO);
+    console.log(`✓ Upload complete. Request ID: ${requestId}`);
+
+    // Poll with small intervals to catch status changes
+    console.log('\nPolling status with 5-second intervals...');
+    console.log('Expected progression: initiated → upload_completed → processing → completed');
+    console.log('-'.repeat(70));
+
+    const statusesSeen = [];
+    const maxChecks = 50;
+
+    for (let i = 0; i < maxChecks; i++) {
+      await new Promise(resolve => setTimeout(resolve, 5000));
+
+      try {
+        const url = `${baseUrl}/v1/asynch/emotion/status/${requestId}`;
+        const response = await axios.get(url, {
+          headers: { 'x-api-key': apiKey }
+        });
+
+        if (response.status === 200) {
+          const status = response.data.status;
+
+          if (status && !statusesSeen.includes(status)) {
+            statusesSeen.push(status);
+            console.log(` [${String(i * 5).padStart(3)}s] Status changed to: ${status}`);
+          }
+
+          if (status === 'completed') {
+            console.log(`\n✓ Processing completed after ${i * 5} seconds`);
+            break;
+          }
+        }
+      } catch (error) {
+        console.log(` Warning: Status check error: ${error.message}`);
+      }
+    }
+
+    console.log(`\nStatus progression observed: ${statusesSeen.join(' → ')}`);
+
+    expect(statusesSeen).toContain('completed');
+
+    console.log('\n' + '='.repeat(70));
+    console.log('✅ STATUS PROGRESSION TEST PASSED');
+    console.log('='.repeat(70));
+  }, 600000);
+
+  test('request_id is tracking ID (NOT completion signal)', async () => {
+    console.log('\n' + '='.repeat(70));
+    console.log('Testing: request_id is Tracking ID (NOT Completion Signal)');
+    console.log('='.repeat(70));
+
+    // Measure upload time
+    console.log(`\nUploading: ${path.basename(TEST_LONG_AUDIO)}`);
+    console.log('Measuring time to receive request_id...');
+
+    const startTime = Date.now();
+    const requestId = await client.asynch.upload(TEST_LONG_AUDIO);
+    const uploadDuration = (Date.now() - startTime) / 1000;
+
+    console.log(`\n✓ request_id received: ${requestId}`);
+    console.log(` Time to get request_id: ${uploadDuration.toFixed(2)} seconds`);
+    console.log(' ⚠️ This is ONLY the S3 upload time');
+
+    // Verify request_id is available quickly
+    expect(requestId).toBeDefined();
+    expect(typeof requestId).toBe('string');
+    expect(uploadDuration).toBeLessThan(60); // Should be relatively quick
+
+    // Now measure actual processing time
+    console.log('\nNow polling for actual processing completion...');
+    console.log('This will take longer because server needs to process audio...');
+
+    const processStart = Date.now();
+    const result = await client.asynch.emotions(requestId, 50, 10000);
+    const processDuration = (Date.now() - processStart) / 1000;
+
+    console.log(`\n✓ Processing complete: ${result.status}`);
+    console.log(` Time to process: ${processDuration.toFixed(2)} seconds`);
+
+    console.log('\n' + '-'.repeat(70));
+    console.log('PROOF THAT request_id ≠ COMPLETION:');
+    console.log('-'.repeat(70));
+    console.log(` Upload returned request_id in: ${uploadDuration.toFixed(2).padStart(6)} seconds`);
+    console.log(` But processing actually took: ${processDuration.toFixed(2).padStart(6)} seconds`);
+    console.log(` Difference: ${(processDuration - uploadDuration).toFixed(2).padStart(6)} seconds`);
+    console.log('\n ✓ Therefore: request_id is a TRACKING ID, not a completion signal');
+
+    // Processing should take longer than just getting request_id
+    expect(processDuration).toBeGreaterThan(uploadDuration);
+
+    console.log('\n' + '='.repeat(70));
+    console.log('✅ TRACKING ID VALIDATION TEST PASSED');
+    console.log('='.repeat(70));
+  }, 600000);
+
+  test('Async workflow with short audio', async () => {
+    console.log('\n' + '='.repeat(70));
+    console.log('Testing Async Workflow with Short Audio');
+    console.log('='.repeat(70));
+
+    console.log(`\nUploading: ${path.basename(TEST_SHORT_AUDIO)}`);
+
+    // Upload
+    const requestId = await client.asynch.upload(TEST_SHORT_AUDIO);
+    console.log(`✓ Request ID: ${requestId}`);
+
+    // Process
+    console.log('\nProcessing...');
+    const result = await client.asynch.emotions(requestId, 30, 10000);
+
+    console.log(`✓ Status: ${result.status}`);
+    console.log(` Emotion data points: ${result.emotions.length}`);
+
+    expect(result.status).toBe('completed');
+    expect(result.emotions.length).toBeGreaterThan(0);
+
+    console.log('\n' + '='.repeat(70));
+    console.log('✅ SHORT AUDIO TEST PASSED');
+    console.log('='.repeat(70));
+  }, 600000);
+});
+
+if (process.env.VALENCE_E2E_TEST !== 'true') {
+  console.log('\n' + '='.repeat(70));
+  console.log('Valence SDK - Async API E2E Tests');
+  console.log('='.repeat(70));
+  console.log('\nTo run these tests:');
+  console.log(' export VALENCE_E2E_TEST=true');
+  console.log(' export VALENCE_API_KEY=your_api_key');
+  console.log(' npm test -- tests/e2e.asyncWorkflow.test.js');
+  console.log('\n' + '='.repeat(70));
+}
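The new e2e test above exercises the SDK's three-phase async flow: upload to S3 (which returns a tracking request_id immediately), server-side background processing, and polling for results. A minimal consumer-side sketch of that same flow is shown below. It uses only the client calls exercised in the test (`client.asynch.upload`, `client.asynch.emotions`) and assumes the published `valenceai` entry point re-exports `ValenceClient` the way `../src/index.js` does in the test; the file path and printed fields are illustrative, not an API reference.

```js
// Sketch (ES module with top-level await) of the async workflow covered by
// tests/e2e.asyncWorkflow.test.js. Assumption: the package entry re-exports
// ValenceClient; the audio path below is a placeholder.
import { ValenceClient } from 'valenceai';

const client = new ValenceClient({
  apiKey: process.env.VALENCE_API_KEY,
  baseUrl: 'https://demo.getvalenceai.com' // demo server used by the e2e tests
});

// Phase 1: upload only; the returned request_id is a tracking ID, not a completion signal.
const requestId = await client.asynch.upload('audio/sample.wav');

// Phases 2-3: the server processes in the background; poll until completed
// (up to 50 tries, 10 seconds apart, mirroring the test's parameters).
const result = await client.asynch.emotions(requestId, 50, 10000);

if (result.status === 'completed') {
  for (const e of result.emotions.slice(0, 3)) {
    console.log(`${e.start_time}s-${e.end_time}s: ${e.emotion} (${e.confidence})`);
  }
}
```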