@aj-archipelago/cortex 1.3.10 → 1.3.12
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/config.js +15 -0
- package/helper-apps/cortex-file-handler/.env.test +7 -0
- package/helper-apps/cortex-file-handler/.env.test.azure +6 -0
- package/helper-apps/cortex-file-handler/.env.test.gcs +9 -0
- package/helper-apps/cortex-file-handler/blobHandler.js +263 -179
- package/helper-apps/cortex-file-handler/constants.js +107 -0
- package/helper-apps/cortex-file-handler/docHelper.js +4 -1
- package/helper-apps/cortex-file-handler/fileChunker.js +171 -109
- package/helper-apps/cortex-file-handler/helper.js +39 -17
- package/helper-apps/cortex-file-handler/index.js +230 -138
- package/helper-apps/cortex-file-handler/localFileHandler.js +21 -3
- package/helper-apps/cortex-file-handler/package-lock.json +2622 -51
- package/helper-apps/cortex-file-handler/package.json +24 -4
- package/helper-apps/cortex-file-handler/redis.js +9 -18
- package/helper-apps/cortex-file-handler/scripts/setup-azure-container.js +22 -0
- package/helper-apps/cortex-file-handler/scripts/setup-test-containers.js +49 -0
- package/helper-apps/cortex-file-handler/scripts/test-azure.sh +34 -0
- package/helper-apps/cortex-file-handler/scripts/test-gcs.sh +49 -0
- package/helper-apps/cortex-file-handler/start.js +26 -4
- package/helper-apps/cortex-file-handler/tests/docHelper.test.js +148 -0
- package/helper-apps/cortex-file-handler/tests/fileChunker.test.js +322 -0
- package/helper-apps/cortex-file-handler/tests/start.test.js +928 -0
- package/helper-apps/cortex-realtime-voice-server/client/src/chat/components/ScreenshotCapture.tsx +57 -9
- package/helper-apps/cortex-realtime-voice-server/src/SocketServer.ts +35 -22
- package/helper-apps/cortex-realtime-voice-server/src/Tools.ts +65 -14
- package/helper-apps/cortex-realtime-voice-server/src/realtime/client.ts +10 -10
- package/helper-apps/cortex-realtime-voice-server/src/realtime/socket.ts +2 -1
- package/package.json +1 -1
- package/pathways/system/entity/sys_entity_continue.js +1 -1
- package/pathways/system/entity/sys_entity_start.js +1 -0
- package/pathways/system/entity/sys_generator_reasoning.js +1 -1
- package/pathways/system/entity/sys_generator_video_vision.js +2 -1
- package/pathways/system/entity/sys_router_tool.js +6 -4
- package/pathways/system/rest_streaming/sys_openai_chat_o1.js +19 -0
- package/pathways/system/rest_streaming/sys_openai_chat_o1_mini.js +19 -0
- package/server/plugins/openAiReasoningPlugin.js +11 -2
- package/server/plugins/openAiWhisperPlugin.js +9 -13
|
@@ -0,0 +1,928 @@
|
|
|
1
|
+
import test from 'ava';
|
|
2
|
+
import axios from 'axios';
|
|
3
|
+
import FormData from 'form-data';
|
|
4
|
+
import { port, publicFolder, ipAddress } from '../start.js';
|
|
5
|
+
import { v4 as uuidv4 } from 'uuid';
|
|
6
|
+
|
|
7
|
+
// Add these helper functions at the top after imports
|
|
8
|
+
const baseUrl = `http://localhost:${port}/api/CortexFileHandler`;
|
|
9
|
+
|
|
10
|
+
// Helper function to determine if Azure is configured
|
|
11
|
+
function isAzureConfigured() {
|
|
12
|
+
return process.env.AZURE_STORAGE_CONNECTION_STRING &&
|
|
13
|
+
process.env.AZURE_STORAGE_CONNECTION_STRING !== 'UseDevelopmentStorage=true';
|
|
14
|
+
}
|
|
15
|
+
|
|
16
|
+
// Helper function to convert URLs for testing
|
|
17
|
+
function convertToLocalUrl(url) {
|
|
18
|
+
// If it's an Azurite URL (contains 127.0.0.1:10000), use it as is
|
|
19
|
+
if (url.includes('127.0.0.1:10000')) {
|
|
20
|
+
return url;
|
|
21
|
+
}
|
|
22
|
+
// For local storage URLs, convert any IP:port to localhost:port
|
|
23
|
+
const urlObj = new URL(url);
|
|
24
|
+
return url.replace(urlObj.host, `localhost:${port}`);
|
|
25
|
+
}
|
|
26
|
+
|
|
27
|
+
// Helper function to clean up uploaded files
|
|
28
|
+
async function cleanupUploadedFile(t, url) {
|
|
29
|
+
// Convert URL to use localhost
|
|
30
|
+
url = convertToLocalUrl(url);
|
|
31
|
+
const folderName = getFolderNameFromUrl(url);
|
|
32
|
+
|
|
33
|
+
// Delete the file
|
|
34
|
+
const deleteResponse = await axios.delete(`${baseUrl}?operation=delete&requestId=${folderName}`);
|
|
35
|
+
t.is(deleteResponse.status, 200, 'Delete should succeed');
|
|
36
|
+
t.true(Array.isArray(deleteResponse.data.body), 'Delete response should be an array');
|
|
37
|
+
t.true(deleteResponse.data.body.length > 0, 'Should have deleted at least one file');
|
|
38
|
+
|
|
39
|
+
// Verify file is gone
|
|
40
|
+
const verifyResponse = await axios.get(url, {
|
|
41
|
+
validateStatus: status => true,
|
|
42
|
+
timeout: 5000
|
|
43
|
+
});
|
|
44
|
+
t.is(verifyResponse.status, 404, 'File should not exist after deletion');
|
|
45
|
+
}
|
|
46
|
+
|
|
47
|
+
// Helper function to get folder name from URL
|
|
48
|
+
function getFolderNameFromUrl(url) {
|
|
49
|
+
const urlObj = new URL(url);
|
|
50
|
+
const parts = urlObj.pathname.split('/');
|
|
51
|
+
// For Azure URLs (contains 127.0.0.1:10000), folder name is at index 3
|
|
52
|
+
if (url.includes('127.0.0.1:10000')) {
|
|
53
|
+
return parts[3].split('_')[0];
|
|
54
|
+
}
|
|
55
|
+
// For local storage URLs, folder name is at index 2
|
|
56
|
+
return parts[2].split('_')[0];
|
|
57
|
+
}
|
|
58
|
+
|
|
59
|
+
// Helper function to upload files
|
|
60
|
+
async function uploadFile(file, requestId, hash = null) {
|
|
61
|
+
const form = new FormData();
|
|
62
|
+
form.append('file', file);
|
|
63
|
+
if (requestId) form.append('requestId', requestId);
|
|
64
|
+
if (hash) form.append('hash', hash);
|
|
65
|
+
|
|
66
|
+
const response = await axios.post(baseUrl, form, {
|
|
67
|
+
headers: {
|
|
68
|
+
...form.getHeaders(),
|
|
69
|
+
'Content-Type': 'multipart/form-data'
|
|
70
|
+
},
|
|
71
|
+
validateStatus: status => true,
|
|
72
|
+
timeout: 5000
|
|
73
|
+
});
|
|
74
|
+
|
|
75
|
+
if (response.data?.url) {
|
|
76
|
+
response.data.url = convertToLocalUrl(response.data.url);
|
|
77
|
+
}
|
|
78
|
+
|
|
79
|
+
return response;
|
|
80
|
+
}
|
|
81
|
+
|
|
82
|
+
// Ensure server is ready before tests
|
|
83
|
+
test.before(async t => {
|
|
84
|
+
// Wait for server to be ready
|
|
85
|
+
await new Promise(resolve => setTimeout(resolve, 1000));
|
|
86
|
+
|
|
87
|
+
// Verify server is responding
|
|
88
|
+
try {
|
|
89
|
+
await axios.get(`http://localhost:${port}/files`);
|
|
90
|
+
} catch (error) {
|
|
91
|
+
// 404 is fine, it means server is running but directory is empty
|
|
92
|
+
if (error.response?.status !== 404) {
|
|
93
|
+
throw new Error('Server not ready');
|
|
94
|
+
}
|
|
95
|
+
}
|
|
96
|
+
});
|
|
97
|
+
|
|
98
|
+
// Configuration Tests
|
|
99
|
+
test('should have valid server configuration', t => {
|
|
100
|
+
t.truthy(port, 'Port should be defined');
|
|
101
|
+
t.truthy(publicFolder, 'Public folder should be defined');
|
|
102
|
+
t.truthy(ipAddress, 'IP address should be defined');
|
|
103
|
+
});
|
|
104
|
+
|
|
105
|
+
// Parameter Validation Tests
|
|
106
|
+
test.serial('should validate required parameters on CortexFileHandler endpoint', async t => {
|
|
107
|
+
const response = await axios.get(`http://localhost:${port}/api/CortexFileHandler`, {
|
|
108
|
+
validateStatus: status => true,
|
|
109
|
+
timeout: 5000
|
|
110
|
+
});
|
|
111
|
+
|
|
112
|
+
t.is(response.status, 400, 'Should return 400 for missing parameters');
|
|
113
|
+
t.is(
|
|
114
|
+
response.data,
|
|
115
|
+
'Please pass a uri and requestId on the query string or in the request body',
|
|
116
|
+
'Should return proper error message'
|
|
117
|
+
);
|
|
118
|
+
});
|
|
119
|
+
|
|
120
|
+
test.serial('should validate required parameters on MediaFileChunker legacy endpoint', async t => {
|
|
121
|
+
const response = await axios.get(`http://localhost:${port}/api/MediaFileChunker`, {
|
|
122
|
+
validateStatus: status => true,
|
|
123
|
+
timeout: 5000
|
|
124
|
+
});
|
|
125
|
+
|
|
126
|
+
t.is(response.status, 400, 'Should return 400 for missing parameters');
|
|
127
|
+
t.is(
|
|
128
|
+
response.data,
|
|
129
|
+
'Please pass a uri and requestId on the query string or in the request body',
|
|
130
|
+
'Should return proper error message'
|
|
131
|
+
);
|
|
132
|
+
});
|
|
133
|
+
|
|
134
|
+
// Static Files Tests
|
|
135
|
+
test.serial('should serve static files from public directory', async t => {
|
|
136
|
+
try {
|
|
137
|
+
const response = await axios.get(`http://localhost:${port}/files`, {
|
|
138
|
+
timeout: 5000,
|
|
139
|
+
validateStatus: status => status === 200 || status === 404
|
|
140
|
+
});
|
|
141
|
+
|
|
142
|
+
t.true(
|
|
143
|
+
response.status === 200 || response.status === 404,
|
|
144
|
+
'Should respond with 200 or 404 for static files'
|
|
145
|
+
);
|
|
146
|
+
} catch (error) {
|
|
147
|
+
t.fail(`Failed to connect to files endpoint: ${error.message}`);
|
|
148
|
+
}
|
|
149
|
+
});
|
|
150
|
+
|
|
151
|
+
// Hash Operation Tests
|
|
152
|
+
test.serial('should handle non-existent hash check', async t => {
|
|
153
|
+
const response = await axios.get(`http://localhost:${port}/api/CortexFileHandler`, {
|
|
154
|
+
params: {
|
|
155
|
+
hash: 'nonexistent-hash',
|
|
156
|
+
checkHash: true
|
|
157
|
+
},
|
|
158
|
+
validateStatus: status => true,
|
|
159
|
+
timeout: 5000
|
|
160
|
+
});
|
|
161
|
+
|
|
162
|
+
t.is(response.status, 404, 'Should return 404 for non-existent hash');
|
|
163
|
+
t.is(response.data, 'Hash nonexistent-hash not found', 'Should return proper error message');
|
|
164
|
+
});
|
|
165
|
+
|
|
166
|
+
test.serial('should handle hash clearing for non-existent hash', async t => {
|
|
167
|
+
const response = await axios.get(`http://localhost:${port}/api/CortexFileHandler`, {
|
|
168
|
+
params: {
|
|
169
|
+
hash: 'nonexistent-hash',
|
|
170
|
+
clearHash: true
|
|
171
|
+
},
|
|
172
|
+
validateStatus: status => true,
|
|
173
|
+
timeout: 5000
|
|
174
|
+
});
|
|
175
|
+
|
|
176
|
+
t.is(response.status, 404, 'Should return 404 for non-existent hash');
|
|
177
|
+
t.is(response.data, 'Hash nonexistent-hash not found', 'Should return proper message');
|
|
178
|
+
});
|
|
179
|
+
|
|
180
|
+
test.serial('should handle hash operations without hash parameter', async t => {
|
|
181
|
+
const response = await axios.get(`http://localhost:${port}/api/CortexFileHandler`, {
|
|
182
|
+
params: {
|
|
183
|
+
checkHash: true
|
|
184
|
+
},
|
|
185
|
+
validateStatus: status => true,
|
|
186
|
+
timeout: 5000
|
|
187
|
+
});
|
|
188
|
+
|
|
189
|
+
t.is(response.status, 400, 'Should return 400 for missing hash');
|
|
190
|
+
t.is(
|
|
191
|
+
response.data,
|
|
192
|
+
'Please pass a uri and requestId on the query string or in the request body',
|
|
193
|
+
'Should return proper error message'
|
|
194
|
+
);
|
|
195
|
+
});
|
|
196
|
+
|
|
197
|
+
// URL Validation Tests
|
|
198
|
+
test.serial('should reject invalid URLs', async t => {
|
|
199
|
+
const response = await axios.get(`http://localhost:${port}/api/CortexFileHandler`, {
|
|
200
|
+
params: {
|
|
201
|
+
uri: 'not-a-valid-url',
|
|
202
|
+
requestId: 'test-request'
|
|
203
|
+
},
|
|
204
|
+
validateStatus: status => true,
|
|
205
|
+
timeout: 5000
|
|
206
|
+
});
|
|
207
|
+
|
|
208
|
+
t.is(response.status, 500, 'Should return 500 for invalid URL');
|
|
209
|
+
t.true(response.data.includes('Invalid URL'), 'Should indicate invalid URL in error message');
|
|
210
|
+
});
|
|
211
|
+
|
|
212
|
+
test.serial('should reject unsupported protocols', async t => {
|
|
213
|
+
const response = await axios.get(`http://localhost:${port}/api/CortexFileHandler`, {
|
|
214
|
+
params: {
|
|
215
|
+
uri: 'ftp://example.com/test.mp3',
|
|
216
|
+
requestId: 'test-request'
|
|
217
|
+
},
|
|
218
|
+
validateStatus: status => true,
|
|
219
|
+
timeout: 5000
|
|
220
|
+
});
|
|
221
|
+
|
|
222
|
+
t.is(response.status, 500, 'Should return 500 for unsupported protocol');
|
|
223
|
+
t.true(
|
|
224
|
+
response.data.includes('Error processing media file'),
|
|
225
|
+
'Should indicate error processing media file'
|
|
226
|
+
);
|
|
227
|
+
});
|
|
228
|
+
|
|
229
|
+
// Remote File Operation Tests
|
|
230
|
+
test.serial('should validate remote file URL format', async t => {
|
|
231
|
+
const response = await axios.get(`http://localhost:${port}/api/CortexFileHandler`, {
|
|
232
|
+
params: {
|
|
233
|
+
fetch: 'not-a-valid-url'
|
|
234
|
+
},
|
|
235
|
+
validateStatus: status => true,
|
|
236
|
+
timeout: 5000
|
|
237
|
+
});
|
|
238
|
+
|
|
239
|
+
t.is(response.status, 400, 'Should return 400 for invalid remote URL');
|
|
240
|
+
t.is(response.data, 'Invalid or inaccessible URL', 'Should return proper error message');
|
|
241
|
+
});
|
|
242
|
+
|
|
243
|
+
test.serial('should handle restore operation with invalid URL', async t => {
|
|
244
|
+
const response = await axios.get(`http://localhost:${port}/api/CortexFileHandler`, {
|
|
245
|
+
params: {
|
|
246
|
+
restore: 'not-a-valid-url'
|
|
247
|
+
},
|
|
248
|
+
validateStatus: status => true,
|
|
249
|
+
timeout: 5000
|
|
250
|
+
});
|
|
251
|
+
|
|
252
|
+
t.is(response.status, 400, 'Should return 400 for invalid restore URL');
|
|
253
|
+
t.is(response.data, 'Invalid or inaccessible URL', 'Should return proper error message');
|
|
254
|
+
});
|
|
255
|
+
|
|
256
|
+
test.serial('should handle load operation with invalid URL', async t => {
|
|
257
|
+
const response = await axios.get(`http://localhost:${port}/api/CortexFileHandler`, {
|
|
258
|
+
params: {
|
|
259
|
+
load: 'not-a-valid-url'
|
|
260
|
+
},
|
|
261
|
+
validateStatus: status => true,
|
|
262
|
+
timeout: 5000
|
|
263
|
+
});
|
|
264
|
+
|
|
265
|
+
t.is(response.status, 400, 'Should return 400 for invalid load URL');
|
|
266
|
+
t.is(response.data, 'Invalid or inaccessible URL', 'Should return proper error message');
|
|
267
|
+
});
|
|
268
|
+
|
|
269
|
+
// Delete Operation Tests
|
|
270
|
+
test.serial('should validate requestId for delete operation', async t => {
|
|
271
|
+
const response = await axios.delete(`http://localhost:${port}/api/CortexFileHandler`, {
|
|
272
|
+
validateStatus: status => true,
|
|
273
|
+
timeout: 5000
|
|
274
|
+
});
|
|
275
|
+
|
|
276
|
+
t.is(response.status, 400, 'Should return 400 for missing requestId');
|
|
277
|
+
t.is(
|
|
278
|
+
response.data,
|
|
279
|
+
'Please pass a requestId on the query string',
|
|
280
|
+
'Should return proper error message'
|
|
281
|
+
);
|
|
282
|
+
});
|
|
283
|
+
|
|
284
|
+
test.serial('should handle delete with valid requestId', async t => {
|
|
285
|
+
const testRequestId = 'test-delete-request';
|
|
286
|
+
const testContent = 'test content';
|
|
287
|
+
const form = new FormData();
|
|
288
|
+
form.append('file', Buffer.from(testContent), 'test.txt');
|
|
289
|
+
|
|
290
|
+
// Upload a file first
|
|
291
|
+
const uploadResponse = await axios.post(baseUrl, form, {
|
|
292
|
+
headers: form.getHeaders(),
|
|
293
|
+
validateStatus: status => true,
|
|
294
|
+
timeout: 5000
|
|
295
|
+
});
|
|
296
|
+
t.is(uploadResponse.status, 200, 'Upload should succeed');
|
|
297
|
+
|
|
298
|
+
// Extract the folder name from the URL
|
|
299
|
+
const url = uploadResponse.data.url;
|
|
300
|
+
const folderName = getFolderNameFromUrl(url);
|
|
301
|
+
|
|
302
|
+
// Delete the file
|
|
303
|
+
const deleteResponse = await axios.delete(`${baseUrl}?operation=delete&requestId=${folderName}`);
|
|
304
|
+
t.is(deleteResponse.status, 200, 'Delete should succeed');
|
|
305
|
+
t.true(Array.isArray(deleteResponse.data.body), 'Response should be an array of deleted files');
|
|
306
|
+
t.true(deleteResponse.data.body.length > 0, 'Should have deleted at least one file');
|
|
307
|
+
t.true(deleteResponse.data.body[0].includes(folderName), 'Deleted file should contain folder name');
|
|
308
|
+
});
|
|
309
|
+
|
|
310
|
+
test.serial('should handle delete with non-existent requestId', async t => {
|
|
311
|
+
const response = await axios.delete(`http://localhost:${port}/api/CortexFileHandler`, {
|
|
312
|
+
params: {
|
|
313
|
+
requestId: 'nonexistent-request'
|
|
314
|
+
},
|
|
315
|
+
validateStatus: status => true,
|
|
316
|
+
timeout: 30000
|
|
317
|
+
});
|
|
318
|
+
|
|
319
|
+
t.is(response.status, 200, 'Should return 200 even for non-existent requestId');
|
|
320
|
+
t.deepEqual(response.data.body, [], 'Should return empty array for non-existent requestId');
|
|
321
|
+
});
|
|
322
|
+
|
|
323
|
+
test('should handle delete with invalid requestId', async t => {
|
|
324
|
+
const response = await axios.get(`http://localhost:${port}/api/CortexFileHandler`, {
|
|
325
|
+
params: {
|
|
326
|
+
requestId: 'nonexistent-request',
|
|
327
|
+
operation: 'delete'
|
|
328
|
+
},
|
|
329
|
+
timeout: 5000
|
|
330
|
+
});
|
|
331
|
+
t.is(response.status, 200, 'Should return 200 for delete with invalid requestId');
|
|
332
|
+
t.true(Array.isArray(response.data.body), 'Response should be an array');
|
|
333
|
+
t.is(response.data.body.length, 0, 'Response should be empty array for non-existent requestId');
|
|
334
|
+
});
|
|
335
|
+
|
|
336
|
+
// POST Operation Tests
|
|
337
|
+
test('should handle empty POST request', async t => {
|
|
338
|
+
const form = new FormData();
|
|
339
|
+
try {
|
|
340
|
+
await axios.post(
|
|
341
|
+
`http://localhost:${port}/api/CortexFileHandler`,
|
|
342
|
+
form,
|
|
343
|
+
{
|
|
344
|
+
headers: form.getHeaders(),
|
|
345
|
+
timeout: 5000
|
|
346
|
+
}
|
|
347
|
+
);
|
|
348
|
+
t.fail('Should have thrown error');
|
|
349
|
+
} catch (error) {
|
|
350
|
+
t.is(error.response.status, 400, 'Should return 400 for empty POST request');
|
|
351
|
+
t.is(error.response.data, 'No file provided in request', 'Should return proper error message');
|
|
352
|
+
}
|
|
353
|
+
});
|
|
354
|
+
|
|
355
|
+
// Upload Tests
|
|
356
|
+
test.serial('should handle successful file upload with hash', async t => {
|
|
357
|
+
const form = new FormData();
|
|
358
|
+
const testHash = 'test-hash-123';
|
|
359
|
+
const testContent = 'test content';
|
|
360
|
+
form.append('file', Buffer.from(testContent), 'test.txt');
|
|
361
|
+
form.append('hash', testHash);
|
|
362
|
+
|
|
363
|
+
// Upload file with hash
|
|
364
|
+
const uploadResponse = await axios.post(
|
|
365
|
+
`http://localhost:${port}/api/CortexFileHandler`,
|
|
366
|
+
form,
|
|
367
|
+
{
|
|
368
|
+
headers: {
|
|
369
|
+
...form.getHeaders(),
|
|
370
|
+
'Content-Type': 'multipart/form-data'
|
|
371
|
+
},
|
|
372
|
+
validateStatus: status => true,
|
|
373
|
+
timeout: 5000
|
|
374
|
+
}
|
|
375
|
+
);
|
|
376
|
+
|
|
377
|
+
t.is(uploadResponse.status, 200, 'Upload should succeed');
|
|
378
|
+
t.truthy(uploadResponse.data.url, 'Response should contain file URL');
|
|
379
|
+
|
|
380
|
+
// Wait a bit for Redis to be updated
|
|
381
|
+
await new Promise(resolve => setTimeout(resolve, 1000));
|
|
382
|
+
|
|
383
|
+
// Verify hash exists and returns the file info
|
|
384
|
+
const hashCheckResponse = await axios.get(`http://localhost:${port}/api/CortexFileHandler`, {
|
|
385
|
+
params: {
|
|
386
|
+
hash: testHash,
|
|
387
|
+
checkHash: true
|
|
388
|
+
},
|
|
389
|
+
validateStatus: status => true,
|
|
390
|
+
timeout: 5000
|
|
391
|
+
});
|
|
392
|
+
|
|
393
|
+
t.is(hashCheckResponse.status, 404, 'Hash check should return 404 for new hash');
|
|
394
|
+
t.is(hashCheckResponse.data, `Hash ${testHash} not found`, 'Should indicate hash not found');
|
|
395
|
+
|
|
396
|
+
await cleanupUploadedFile(t, uploadResponse.data.url);
|
|
397
|
+
});
|
|
398
|
+
|
|
399
|
+
test.serial('should handle hash clearing', async t => {
|
|
400
|
+
const testHash = 'test-hash-to-clear';
|
|
401
|
+
const form = new FormData();
|
|
402
|
+
form.append('file', Buffer.from('test content'), 'test.txt');
|
|
403
|
+
form.append('hash', testHash);
|
|
404
|
+
|
|
405
|
+
// First upload a file with the hash
|
|
406
|
+
const uploadResponse = await axios.post(
|
|
407
|
+
`http://localhost:${port}/api/CortexFileHandler`,
|
|
408
|
+
form,
|
|
409
|
+
{
|
|
410
|
+
headers: {
|
|
411
|
+
...form.getHeaders(),
|
|
412
|
+
'Content-Type': 'multipart/form-data'
|
|
413
|
+
},
|
|
414
|
+
validateStatus: status => true,
|
|
415
|
+
timeout: 5000
|
|
416
|
+
}
|
|
417
|
+
);
|
|
418
|
+
|
|
419
|
+
t.is(uploadResponse.status, 200, 'Upload should succeed');
|
|
420
|
+
t.truthy(uploadResponse.data.url, 'Response should contain file URL');
|
|
421
|
+
|
|
422
|
+
// Wait a bit for Redis to be updated
|
|
423
|
+
await new Promise(resolve => setTimeout(resolve, 1000));
|
|
424
|
+
|
|
425
|
+
// Clear the hash
|
|
426
|
+
const clearResponse = await axios.get(`http://localhost:${port}/api/CortexFileHandler`, {
|
|
427
|
+
params: {
|
|
428
|
+
hash: testHash,
|
|
429
|
+
clearHash: true
|
|
430
|
+
},
|
|
431
|
+
validateStatus: status => true,
|
|
432
|
+
timeout: 5000
|
|
433
|
+
});
|
|
434
|
+
|
|
435
|
+
t.is(clearResponse.status, 404, 'Hash clearing should return 404 for new hash');
|
|
436
|
+
t.is(clearResponse.data, `Hash ${testHash} not found`, 'Should indicate hash not found');
|
|
437
|
+
|
|
438
|
+
// Verify hash no longer exists
|
|
439
|
+
const verifyResponse = await axios.get(`http://localhost:${port}/api/CortexFileHandler`, {
|
|
440
|
+
params: {
|
|
441
|
+
hash: testHash,
|
|
442
|
+
checkHash: true
|
|
443
|
+
},
|
|
444
|
+
validateStatus: status => true,
|
|
445
|
+
timeout: 5000
|
|
446
|
+
});
|
|
447
|
+
|
|
448
|
+
t.is(verifyResponse.status, 404, 'Hash should not exist');
|
|
449
|
+
t.is(verifyResponse.data, `Hash ${testHash} not found`, 'Should indicate hash not found');
|
|
450
|
+
|
|
451
|
+
// Clean up the uploaded file
|
|
452
|
+
await cleanupUploadedFile(t, uploadResponse.data.url);
|
|
453
|
+
});
|
|
454
|
+
|
|
455
|
+
test.serial('should handle file upload without hash', async t => {
|
|
456
|
+
const form = new FormData();
|
|
457
|
+
form.append('file', Buffer.from('test content'), 'test.txt');
|
|
458
|
+
|
|
459
|
+
const response = await axios.post(
|
|
460
|
+
`http://localhost:${port}/api/CortexFileHandler`,
|
|
461
|
+
form,
|
|
462
|
+
{
|
|
463
|
+
headers: {
|
|
464
|
+
...form.getHeaders(),
|
|
465
|
+
'Content-Type': 'multipart/form-data'
|
|
466
|
+
},
|
|
467
|
+
validateStatus: status => true,
|
|
468
|
+
timeout: 5000
|
|
469
|
+
}
|
|
470
|
+
);
|
|
471
|
+
|
|
472
|
+
t.is(response.status, 200, 'Upload should succeed');
|
|
473
|
+
t.truthy(response.data.url, 'Response should contain file URL');
|
|
474
|
+
|
|
475
|
+
await cleanupUploadedFile(t, response.data.url);
|
|
476
|
+
});
|
|
477
|
+
|
|
478
|
+
test.serial('should handle upload with empty file', async t => {
|
|
479
|
+
const form = new FormData();
|
|
480
|
+
// Empty file
|
|
481
|
+
form.append('file', Buffer.from(''), 'empty.txt');
|
|
482
|
+
|
|
483
|
+
const response = await axios.post(
|
|
484
|
+
`http://localhost:${port}/api/CortexFileHandler`,
|
|
485
|
+
form,
|
|
486
|
+
{
|
|
487
|
+
headers: {
|
|
488
|
+
...form.getHeaders(),
|
|
489
|
+
'Content-Type': 'multipart/form-data'
|
|
490
|
+
},
|
|
491
|
+
validateStatus: status => true,
|
|
492
|
+
timeout: 5000
|
|
493
|
+
}
|
|
494
|
+
);
|
|
495
|
+
|
|
496
|
+
t.is(response.status, 200, 'Should accept empty file');
|
|
497
|
+
t.truthy(response.data.url, 'Should return URL for empty file');
|
|
498
|
+
|
|
499
|
+
await cleanupUploadedFile(t, response.data.url);
|
|
500
|
+
});
|
|
501
|
+
|
|
502
|
+
test.serial('should handle complete upload-request-delete-verify sequence', async t => {
|
|
503
|
+
const testContent = 'test content for sequence';
|
|
504
|
+
const testHash = 'test-sequence-hash';
|
|
505
|
+
const form = new FormData();
|
|
506
|
+
form.append('file', Buffer.from(testContent), 'sequence-test.txt');
|
|
507
|
+
form.append('hash', testHash);
|
|
508
|
+
|
|
509
|
+
// Upload file with hash
|
|
510
|
+
const uploadResponse = await axios.post(baseUrl, form, {
|
|
511
|
+
headers: form.getHeaders(),
|
|
512
|
+
validateStatus: status => true,
|
|
513
|
+
timeout: 5000
|
|
514
|
+
});
|
|
515
|
+
t.is(uploadResponse.status, 200, 'Upload should succeed');
|
|
516
|
+
t.truthy(uploadResponse.data.url, 'Response should contain URL');
|
|
517
|
+
|
|
518
|
+
await cleanupUploadedFile(t, uploadResponse.data.url);
|
|
519
|
+
|
|
520
|
+
// Verify hash is gone by trying to get the file URL
|
|
521
|
+
const hashCheckResponse = await axios.get(`${baseUrl}`, {
|
|
522
|
+
params: {
|
|
523
|
+
hash: testHash,
|
|
524
|
+
checkHash: true
|
|
525
|
+
},
|
|
526
|
+
validateStatus: status => true
|
|
527
|
+
});
|
|
528
|
+
t.is(hashCheckResponse.status, 404, 'Hash should not exist after deletion');
|
|
529
|
+
});
|
|
530
|
+
|
|
531
|
+
test.serial('should handle multiple file uploads with unique hashes', async t => {
|
|
532
|
+
const uploadedFiles = [];
|
|
533
|
+
|
|
534
|
+
// Upload 10 files
|
|
535
|
+
for (let i = 0; i < 10; i++) {
|
|
536
|
+
const content = `test content for file ${i}`;
|
|
537
|
+
const form = new FormData();
|
|
538
|
+
form.append('file', Buffer.from(content), `file-${i}.txt`);
|
|
539
|
+
|
|
540
|
+
const uploadResponse = await axios.post(baseUrl, form, {
|
|
541
|
+
headers: form.getHeaders(),
|
|
542
|
+
validateStatus: status => true,
|
|
543
|
+
timeout: 5000
|
|
544
|
+
});
|
|
545
|
+
t.is(uploadResponse.status, 200, `Upload should succeed for file ${i}`);
|
|
546
|
+
|
|
547
|
+
const url = uploadResponse.data.url;
|
|
548
|
+
t.truthy(url, `Response should contain URL for file ${i}`);
|
|
549
|
+
|
|
550
|
+
uploadedFiles.push({
|
|
551
|
+
url: convertToLocalUrl(url),
|
|
552
|
+
content
|
|
553
|
+
});
|
|
554
|
+
|
|
555
|
+
// Small delay between uploads
|
|
556
|
+
await new Promise(resolve => setTimeout(resolve, 100));
|
|
557
|
+
}
|
|
558
|
+
|
|
559
|
+
// Verify files are stored and can be fetched
|
|
560
|
+
for (const file of uploadedFiles) {
|
|
561
|
+
const fileResponse = await axios.get(file.url, {
|
|
562
|
+
validateStatus: status => true,
|
|
563
|
+
timeout: 5000
|
|
564
|
+
});
|
|
565
|
+
t.is(fileResponse.status, 200, `File should be accessible at ${file.url}`);
|
|
566
|
+
t.is(fileResponse.data, file.content, `File content should match original content`);
|
|
567
|
+
}
|
|
568
|
+
|
|
569
|
+
// Clean up all files
|
|
570
|
+
for (const file of uploadedFiles) {
|
|
571
|
+
await cleanupUploadedFile(t, file.url);
|
|
572
|
+
}
|
|
573
|
+
});
|
|
574
|
+
|
|
575
|
+
// Example of a hash-specific test that only runs with Azure
|
|
576
|
+
test.serial('should handle hash reuse with Azure storage', async t => {
|
|
577
|
+
if (!isAzureConfigured()) {
|
|
578
|
+
t.pass('Skipping hash test - Azure not configured');
|
|
579
|
+
return;
|
|
580
|
+
}
|
|
581
|
+
|
|
582
|
+
const testHash = 'test-hash-reuse';
|
|
583
|
+
const testContent = 'test content for hash reuse';
|
|
584
|
+
const form = new FormData();
|
|
585
|
+
form.append('file', Buffer.from(testContent), 'test.txt');
|
|
586
|
+
form.append('hash', testHash);
|
|
587
|
+
|
|
588
|
+
// First upload
|
|
589
|
+
const upload1 = await axios.post(baseUrl, form, {
|
|
590
|
+
headers: form.getHeaders(),
|
|
591
|
+
validateStatus: status => true,
|
|
592
|
+
timeout: 5000
|
|
593
|
+
});
|
|
594
|
+
t.is(upload1.status, 200, 'First upload should succeed');
|
|
595
|
+
const originalUrl = upload1.data.url;
|
|
596
|
+
|
|
597
|
+
// Check hash exists and returns the correct URL
|
|
598
|
+
const hashCheck1 = await axios.get(baseUrl, {
|
|
599
|
+
params: { hash: testHash, checkHash: true },
|
|
600
|
+
validateStatus: status => true
|
|
601
|
+
});
|
|
602
|
+
t.is(hashCheck1.status, 200, 'Hash should exist after first upload');
|
|
603
|
+
t.truthy(hashCheck1.data.url, 'Hash check should return URL');
|
|
604
|
+
t.is(hashCheck1.data.url, originalUrl, 'Hash check should return original upload URL');
|
|
605
|
+
|
|
606
|
+
// Verify file is accessible via URL from hash check
|
|
607
|
+
const fileResponse = await axios.get(convertToLocalUrl(hashCheck1.data.url), {
|
|
608
|
+
validateStatus: status => true,
|
|
609
|
+
timeout: 5000
|
|
610
|
+
});
|
|
611
|
+
t.is(fileResponse.status, 200, 'File should be accessible');
|
|
612
|
+
t.is(fileResponse.data, testContent, 'File content should match original');
|
|
613
|
+
|
|
614
|
+
// Second upload with same hash
|
|
615
|
+
const upload2 = await axios.post(baseUrl, form, {
|
|
616
|
+
headers: form.getHeaders(),
|
|
617
|
+
validateStatus: status => true,
|
|
618
|
+
timeout: 5000
|
|
619
|
+
});
|
|
620
|
+
t.is(upload2.status, 200, 'Second upload should succeed');
|
|
621
|
+
t.is(upload2.data.url, originalUrl, 'URLs should match for same hash');
|
|
622
|
+
|
|
623
|
+
// Verify file is still accessible after second upload
|
|
624
|
+
const fileResponse2 = await axios.get(convertToLocalUrl(upload2.data.url), {
|
|
625
|
+
validateStatus: status => true,
|
|
626
|
+
timeout: 5000
|
|
627
|
+
});
|
|
628
|
+
t.is(fileResponse2.status, 200, 'File should still be accessible');
|
|
629
|
+
t.is(fileResponse2.data, testContent, 'File content should still match original');
|
|
630
|
+
|
|
631
|
+
// Clean up
|
|
632
|
+
await cleanupUploadedFile(t, originalUrl);
|
|
633
|
+
|
|
634
|
+
// Verify hash is now gone
|
|
635
|
+
const hashCheckAfterDelete = await axios.get(baseUrl, {
|
|
636
|
+
params: { hash: testHash, checkHash: true },
|
|
637
|
+
validateStatus: status => true
|
|
638
|
+
});
|
|
639
|
+
t.is(hashCheckAfterDelete.status, 404, 'Hash should be gone after file deletion');
|
|
640
|
+
});
|
|
641
|
+
|
|
642
|
+
// Helper to check if GCS is configured
|
|
643
|
+
function isGCSConfigured() {
|
|
644
|
+
return process.env.GCP_SERVICE_ACCOUNT_KEY && process.env.STORAGE_EMULATOR_HOST;
|
|
645
|
+
}
|
|
646
|
+
|
|
647
|
+
// Helper function to check if file exists in fake GCS
|
|
648
|
+
async function checkGCSFile(gcsUrl) {
|
|
649
|
+
// Convert gs:// URL to bucket and object path
|
|
650
|
+
const [, , bucket, ...objectParts] = gcsUrl.split('/');
|
|
651
|
+
const object = objectParts.join('/');
|
|
652
|
+
|
|
653
|
+
// Query fake-gcs-server
|
|
654
|
+
const response = await axios.get(`http://localhost:4443/storage/v1/b/${bucket}/o/${encodeURIComponent(object)}`, {
|
|
655
|
+
validateStatus: status => true
|
|
656
|
+
});
|
|
657
|
+
return response.status === 200;
|
|
658
|
+
}
|
|
659
|
+
|
|
660
|
+
// Helper function to verify file exists in both storages
|
|
661
|
+
async function verifyFileInBothStorages(t, uploadResponse) {
|
|
662
|
+
// Verify Azure URL is accessible
|
|
663
|
+
const azureResponse = await axios.get(convertToLocalUrl(uploadResponse.data.url), {
|
|
664
|
+
validateStatus: status => true,
|
|
665
|
+
timeout: 5000
|
|
666
|
+
});
|
|
667
|
+
t.is(azureResponse.status, 200, 'File should be accessible in Azure');
|
|
668
|
+
|
|
669
|
+
if (isGCSConfigured()) {
|
|
670
|
+
// Verify GCS URL exists and is in correct format
|
|
671
|
+
t.truthy(uploadResponse.data.gcs, 'Response should contain GCS URL');
|
|
672
|
+
t.true(uploadResponse.data.gcs.startsWith('gs://'), 'GCS URL should use gs:// protocol');
|
|
673
|
+
|
|
674
|
+
// Check if file exists in fake GCS
|
|
675
|
+
const exists = await checkGCSFile(uploadResponse.data.gcs);
|
|
676
|
+
t.true(exists, 'File should exist in GCS');
|
|
677
|
+
}
|
|
678
|
+
}
|
|
679
|
+
|
|
680
|
+
// Helper: assert that a previously uploaded file is gone from Azure and,
// when GCS is configured, from the fake GCS server as well.
async function verifyFileDeletedFromBothStorages(t, uploadResponse) {
    // The Azure URL must now respond 404.
    const { status } = await axios.get(convertToLocalUrl(uploadResponse.data.url), {
        timeout: 5000,
        validateStatus: () => true
    });
    t.is(status, 404, 'File should not be accessible in Azure');

    if (!isGCSConfigured()) return;

    // The GCS object must have been removed too.
    const exists = await checkGCSFile(uploadResponse.data.gcs);
    t.false(exists, 'File should not exist in GCS');
}
|
|
695
|
+
|
|
696
|
+
// Uploads a file, verifies it is stored in both Azure and GCS, deletes it by
// the server-assigned requestId, and verifies removal from both storages.
// FIX: removed a FormData instance that was built and populated but never
// sent anywhere — uploadFile() is called with the raw buffer instead.
test.serial('should handle dual storage upload and cleanup when GCS configured', async t => {
    if (!isGCSConfigured()) {
        t.pass('Skipping test - GCS not configured');
        return;
    }

    const requestId = uuidv4();
    const testContent = 'test content for dual storage';

    // Upload file (uploadFile handles the multipart request itself —
    // presumably building its own form; confirm against the helper)
    const uploadResponse = await uploadFile(Buffer.from(testContent), requestId);
    t.is(uploadResponse.status, 200, 'Upload should succeed');
    t.truthy(uploadResponse.data.url, 'Response should contain Azure URL');
    t.truthy(uploadResponse.data.gcs, 'Response should contain GCS URL');
    t.true(uploadResponse.data.gcs.startsWith('gs://'), 'GCS URL should use gs:// protocol');

    // Verify file exists in both storages
    await verifyFileInBothStorages(t, uploadResponse);

    // The server may assign its own folder; derive the requestId from the URL
    const fileRequestId = getFolderNameFromUrl(uploadResponse.data.url);

    // Delete file using the correct requestId
    const deleteResponse = await axios.delete(`${baseUrl}?operation=delete&requestId=${fileRequestId}`);
    t.is(deleteResponse.status, 200, 'Delete should succeed');

    // Verify file is deleted from both storages
    await verifyFileDeletedFromBothStorages(t, uploadResponse);
});
|
|
728
|
+
|
|
729
|
+
// Uploads with an explicit GCS preference and checks both the gs:// URL
// format and that the content round-trips through the regular HTTP URL.
test.serial('should handle GCS URL format and accessibility', async t => {
    if (!isGCSConfigured()) {
        t.pass('Skipping test - GCS not configured');
        return;
    }

    const requestId = uuidv4();
    const testContent = 'test content for GCS URL verification';

    const form = new FormData();
    form.append('file', Buffer.from(testContent), 'gcs-url-test.txt');

    // Upload with explicit GCS preference
    const uploadResponse = await axios.post(`http://localhost:${port}/api/CortexFileHandler`, form, {
        params: { operation: 'upload', requestId, useGCS: true },
        headers: form.getHeaders()
    });

    t.is(uploadResponse.status, 200, 'Upload should succeed');
    t.truthy(uploadResponse.data.gcs, 'Response should contain GCS URL');
    t.true(uploadResponse.data.gcs.startsWith('gs://'), 'GCS URL should use gs:// protocol');

    // gs:// URLs cannot be fetched directly, so read the content back over HTTP
    const fileResponse = await axios.get(uploadResponse.data.url);
    t.is(fileResponse.status, 200, 'File should be accessible');
    t.is(fileResponse.data, testContent, 'Content should match original');

    // Clean up
    await cleanupUploadedFile(t, uploadResponse.data.url);
});
|
|
762
|
+
|
|
763
|
+
// Legacy MediaFileChunker Tests

// Basic upload through the legacy endpoint.
// FIX: rely solely on form.getHeaders() for the Content-Type header.
// The previous manual 'Content-Type': 'multipart/form-data' override
// discarded the boundary parameter that multipart parsing requires.
test.serial('should handle file upload through legacy MediaFileChunker endpoint', async t => {
    const form = new FormData();
    form.append('file', Buffer.from('test content'), 'test.txt');

    const response = await axios.post(
        `http://localhost:${port}/api/MediaFileChunker`,
        form,
        {
            headers: form.getHeaders(),
            validateStatus: status => true,
            timeout: 5000
        }
    );

    t.is(response.status, 200, 'Upload through legacy endpoint should succeed');
    t.truthy(response.data.url, 'Response should contain file URL');

    await cleanupUploadedFile(t, response.data.url);
});
|
|
786
|
+
|
|
787
|
+
// Uploads a file carrying a hash through the legacy endpoint, then checks
// that the hash-lookup path responds 404 for a hash the server does not track.
// FIX: removed the manual 'Content-Type': 'multipart/form-data' override —
// it clobbered the boundary parameter supplied by form.getHeaders().
test.serial('should handle hash operations through legacy MediaFileChunker endpoint', async t => {
    const testHash = 'test-hash-legacy';
    const form = new FormData();
    form.append('file', Buffer.from('test content'), 'test.txt');
    form.append('hash', testHash);

    // Upload file with hash through legacy endpoint
    const uploadResponse = await axios.post(
        `http://localhost:${port}/api/MediaFileChunker`,
        form,
        {
            headers: form.getHeaders(),
            validateStatus: status => true,
            timeout: 5000
        }
    );

    t.is(uploadResponse.status, 200, 'Upload should succeed through legacy endpoint');
    t.truthy(uploadResponse.data.url, 'Response should contain file URL');

    // Wait a bit for Redis to be updated
    await new Promise(resolve => setTimeout(resolve, 1000));

    // Check hash through legacy endpoint
    const hashCheckResponse = await axios.get(`http://localhost:${port}/api/MediaFileChunker`, {
        params: {
            hash: testHash,
            checkHash: true
        },
        validateStatus: status => true,
        timeout: 5000
    });

    // NOTE(review): the legacy endpoint is expected to report this hash as
    // unknown even after upload — confirm this is the intended contract.
    t.is(hashCheckResponse.status, 404, 'Hash check should return 404 for new hash');
    t.is(hashCheckResponse.data, `Hash ${testHash} not found`, 'Should indicate hash not found');

    await cleanupUploadedFile(t, uploadResponse.data.url);
});
|
|
828
|
+
|
|
829
|
+
// Uploads via the legacy endpoint, then deletes using the folder name taken
// from the returned URL and validates the delete response shape.
// FIX: removed the unused local `testRequestId` — the server assigns its own
// folder, which is extracted from the upload URL below.
test.serial('should handle delete operation through legacy MediaFileChunker endpoint', async t => {
    const testContent = 'test content';
    const form = new FormData();
    form.append('file', Buffer.from(testContent), 'test.txt');

    // Upload a file first through legacy endpoint
    const uploadResponse = await axios.post(
        `http://localhost:${port}/api/MediaFileChunker`,
        form,
        {
            headers: form.getHeaders(),
            validateStatus: status => true,
            timeout: 5000
        }
    );
    t.is(uploadResponse.status, 200, 'Upload should succeed through legacy endpoint');

    // Extract the server-assigned folder name from the URL
    const folderName = getFolderNameFromUrl(uploadResponse.data.url);

    // Delete the file through legacy endpoint
    const deleteResponse = await axios.delete(`http://localhost:${port}/api/MediaFileChunker?operation=delete&requestId=${folderName}`);
    t.is(deleteResponse.status, 200, 'Delete should succeed through legacy endpoint');
    t.true(Array.isArray(deleteResponse.data.body), 'Response should be an array of deleted files');
    t.true(deleteResponse.data.body.length > 0, 'Should have deleted at least one file');
    t.true(deleteResponse.data.body[0].includes(folderName), 'Deleted file should contain folder name');
});
|
|
858
|
+
|
|
859
|
+
// A GET with no recognized parameters must be rejected with a 400 and the
// canonical error message.
test.serial('should handle parameter validation through legacy MediaFileChunker endpoint', async t => {
    const response = await axios.get(`http://localhost:${port}/api/MediaFileChunker`, {
        timeout: 5000,
        validateStatus: () => true
    });

    t.is(response.status, 400, 'Should return 400 for missing parameters');
    t.is(
        response.data,
        'Please pass a uri and requestId on the query string or in the request body',
        'Should return proper error message'
    );
});
|
|
873
|
+
|
|
874
|
+
// A POST whose multipart body contains no file must be rejected with a 400.
test.serial('should handle empty POST request through legacy MediaFileChunker endpoint', async t => {
    const emptyForm = new FormData();
    try {
        // No validateStatus here on purpose: axios must reject on the 400.
        await axios.post(
            `http://localhost:${port}/api/MediaFileChunker`,
            emptyForm,
            { headers: emptyForm.getHeaders(), timeout: 5000 }
        );
        t.fail('Should have thrown error');
    } catch (error) {
        t.is(error.response.status, 400, 'Should return 400 for empty POST request');
        t.is(error.response.data, 'No file provided in request', 'Should return proper error message');
    }
});
|
|
891
|
+
|
|
892
|
+
// Full legacy lifecycle: upload with a hash, delete the file, then confirm
// the hash lookup no longer resolves.
test.serial('should handle complete upload-request-delete-verify sequence through legacy MediaFileChunker endpoint', async t => {
    const testContent = 'test content for legacy sequence';
    const testHash = 'test-legacy-sequence-hash';

    const form = new FormData();
    form.append('file', Buffer.from(testContent), 'sequence-test.txt');
    form.append('hash', testHash);

    // Upload file with hash through legacy endpoint
    const uploadResponse = await axios.post(
        `http://localhost:${port}/api/MediaFileChunker`,
        form,
        {
            headers: form.getHeaders(),
            validateStatus: status => true,
            timeout: 5000
        }
    );
    t.is(uploadResponse.status, 200, 'Upload should succeed through legacy endpoint');
    t.truthy(uploadResponse.data.url, 'Response should contain URL');

    await cleanupUploadedFile(t, uploadResponse.data.url);

    // Verify hash is gone by trying to get the file URL through legacy endpoint
    const hashCheckResponse = await axios.get(`http://localhost:${port}/api/MediaFileChunker`, {
        params: { hash: testHash, checkHash: true },
        validateStatus: () => true
    });
    t.is(hashCheckResponse.status, 404, 'Hash should not exist after deletion');
});
|
|
924
|
+
|
|
925
|
+
// Cleanup

// Global teardown hook: runs after all tests regardless of outcome.
// Intentionally a no-op for now.
test.after.always('cleanup', async t => {
    // Add any necessary cleanup here
});
|