@aj-archipelago/cortex 1.3.49 → 1.3.51

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. package/config.js +1 -1
  2. package/helper-apps/cortex-browser/Dockerfile +19 -31
  3. package/helper-apps/cortex-browser/function_app.py +708 -181
  4. package/helper-apps/cortex-browser/requirements.txt +4 -4
  5. package/helper-apps/cortex-file-handler/blobHandler.js +850 -429
  6. package/helper-apps/cortex-file-handler/constants.js +64 -48
  7. package/helper-apps/cortex-file-handler/docHelper.js +7 -114
  8. package/helper-apps/cortex-file-handler/fileChunker.js +96 -51
  9. package/helper-apps/cortex-file-handler/function.json +2 -6
  10. package/helper-apps/cortex-file-handler/helper.js +34 -25
  11. package/helper-apps/cortex-file-handler/index.js +324 -136
  12. package/helper-apps/cortex-file-handler/localFileHandler.js +56 -57
  13. package/helper-apps/cortex-file-handler/package-lock.json +6065 -5964
  14. package/helper-apps/cortex-file-handler/package.json +8 -4
  15. package/helper-apps/cortex-file-handler/redis.js +23 -17
  16. package/helper-apps/cortex-file-handler/scripts/setup-azure-container.js +12 -9
  17. package/helper-apps/cortex-file-handler/scripts/setup-test-containers.js +21 -18
  18. package/helper-apps/cortex-file-handler/scripts/test-azure.sh +1 -1
  19. package/helper-apps/cortex-file-handler/scripts/test-gcs.sh +1 -1
  20. package/helper-apps/cortex-file-handler/services/ConversionService.js +288 -0
  21. package/helper-apps/cortex-file-handler/services/FileConversionService.js +53 -0
  22. package/helper-apps/cortex-file-handler/start.js +63 -38
  23. package/helper-apps/cortex-file-handler/tests/FileConversionService.test.js +144 -0
  24. package/helper-apps/cortex-file-handler/tests/blobHandler.test.js +88 -64
  25. package/helper-apps/cortex-file-handler/tests/fileChunker.test.js +114 -91
  26. package/helper-apps/cortex-file-handler/tests/fileUpload.test.js +351 -0
  27. package/helper-apps/cortex-file-handler/tests/files/DOCX_TestPage.docx +0 -0
  28. package/helper-apps/cortex-file-handler/tests/files/tests-example.xls +0 -0
  29. package/helper-apps/cortex-file-handler/tests/start.test.js +943 -642
  30. package/helper-apps/cortex-file-handler/tests/testUtils.helper.js +31 -0
  31. package/helper-apps/cortex-markitdown/.funcignore +1 -0
  32. package/helper-apps/cortex-markitdown/MarkitdownConverterFunction/__init__.py +64 -0
  33. package/helper-apps/cortex-markitdown/MarkitdownConverterFunction/function.json +21 -0
  34. package/helper-apps/cortex-markitdown/README.md +94 -0
  35. package/helper-apps/cortex-markitdown/host.json +15 -0
  36. package/helper-apps/cortex-markitdown/requirements.txt +2 -0
  37. package/lib/requestExecutor.js +44 -36
  38. package/package.json +1 -1
  39. package/pathways/system/entity/tools/sys_tool_cognitive_search.js +1 -1
  40. package/pathways/system/entity/tools/sys_tool_readfile.js +24 -2
  41. package/server/plugins/openAiWhisperPlugin.js +59 -87
  42. package/helper-apps/cortex-file-handler/tests/docHelper.test.js +0 -148
@@ -1,22 +1,28 @@
1
1
  /* eslint-disable no-unused-vars */
2
+ import { execSync } from 'child_process';
3
+ import fs from 'fs';
4
+ import os from 'os';
5
+ import path from 'path';
6
+ import { PassThrough } from 'stream';
7
+
2
8
  import test from 'ava';
3
9
  import axios from 'axios';
4
10
  // eslint-disable-next-line import/no-extraneous-dependencies
5
11
  import FormData from 'form-data';
6
- import { port, publicFolder, ipAddress } from '../start.js';
7
12
  import { v4 as uuidv4 } from 'uuid';
8
- import path from 'path';
9
- import os from 'os';
10
- import fs from 'fs';
11
- import { execSync } from 'child_process';
13
+
14
+ import { port, publicFolder, ipAddress } from '../start.js';
15
+ import { cleanupHashAndFile, getFolderNameFromUrl } from './testUtils.helper.js';
12
16
 
13
17
  // Add these helper functions at the top after imports
14
18
  const baseUrl = `http://localhost:${port}/api/CortexFileHandler`;
15
19
 
16
20
  // Helper function to determine if Azure is configured
17
21
  function isAzureConfigured() {
18
- return process.env.AZURE_STORAGE_CONNECTION_STRING &&
19
- process.env.AZURE_STORAGE_CONNECTION_STRING !== 'UseDevelopmentStorage=true';
22
+ return (
23
+ process.env.AZURE_STORAGE_CONNECTION_STRING &&
24
+ process.env.AZURE_STORAGE_CONNECTION_STRING !== 'UseDevelopmentStorage=true'
25
+ );
20
26
  }
21
27
 
22
28
  // Helper function to convert URLs for testing
@@ -35,66 +41,73 @@ async function cleanupUploadedFile(t, url) {
35
41
  // Convert URL to use localhost
36
42
  url = convertToLocalUrl(url);
37
43
  const folderName = getFolderNameFromUrl(url);
38
-
44
+
39
45
  // Delete the file
40
- const deleteResponse = await axios.delete(`${baseUrl}?operation=delete&requestId=${folderName}`);
46
+ const deleteResponse = await axios.delete(
47
+ `${baseUrl}?operation=delete&requestId=${folderName}`,
48
+ );
41
49
  t.is(deleteResponse.status, 200, 'Delete should succeed');
42
- t.true(Array.isArray(deleteResponse.data.body), 'Delete response should be an array');
43
- t.true(deleteResponse.data.body.length > 0, 'Should have deleted at least one file');
44
-
50
+ t.true(
51
+ Array.isArray(deleteResponse.data.body),
52
+ 'Delete response should be an array',
53
+ );
54
+ t.true(
55
+ deleteResponse.data.body.length > 0,
56
+ 'Should have deleted at least one file',
57
+ );
58
+
45
59
  // Verify file is gone
46
60
  const verifyResponse = await axios.get(url, {
47
- validateStatus: status => true,
48
- timeout: 5000
61
+ validateStatus: (status) => true,
62
+ timeout: 5000,
49
63
  });
50
64
  t.is(verifyResponse.status, 404, 'File should not exist after deletion');
51
65
  }
52
66
 
53
- // Helper function to get folder name from URL
54
- function getFolderNameFromUrl(url) {
55
- const urlObj = new URL(url);
56
- const parts = urlObj.pathname.split('/');
57
- // For Azure URLs (contains 127.0.0.1:10000), folder name is at index 3
58
- if (url.includes('127.0.0.1:10000')) {
59
- return parts[3].split('_')[0];
60
- }
61
- // For local storage URLs, folder name is at index 2
62
- return parts[2].split('_')[0];
63
- }
64
-
65
67
  // Helper function to upload files
66
68
  async function uploadFile(file, requestId, hash = null) {
67
69
  const form = new FormData();
68
- form.append('file', file);
70
+
71
+ // If file is a Buffer, create a Readable stream
72
+ if (Buffer.isBuffer(file)) {
73
+ const { Readable } = await import('stream');
74
+ const stream = Readable.from(file);
75
+ form.append('file', stream, { filename: 'test.txt' });
76
+ } else {
77
+ form.append('file', file);
78
+ }
79
+
69
80
  if (requestId) form.append('requestId', requestId);
70
81
  if (hash) form.append('hash', hash);
71
-
82
+
72
83
  const response = await axios.post(baseUrl, form, {
73
84
  headers: {
74
85
  ...form.getHeaders(),
75
- 'Content-Type': 'multipart/form-data'
86
+ 'Content-Type': 'multipart/form-data',
76
87
  },
77
- validateStatus: status => true,
78
- timeout: 5000
88
+ validateStatus: (status) => true,
89
+ timeout: 5000,
90
+ maxContentLength: Infinity,
91
+ maxBodyLength: Infinity,
79
92
  });
80
93
 
81
94
  if (response.data?.url) {
82
95
  response.data.url = convertToLocalUrl(response.data.url);
83
96
  }
84
-
97
+
85
98
  return response;
86
99
  }
87
100
 
88
101
  // Ensure server is ready before tests
89
- test.before(async t => {
102
+ test.before(async (t) => {
90
103
  // Wait for server to be ready
91
- await new Promise(resolve => setTimeout(resolve, 1000));
92
-
104
+ await new Promise((resolve) => setTimeout(resolve, 1000));
105
+
93
106
  // Verify server is responding
94
107
  try {
95
108
  await axios.get(`http://localhost:${port}/files`);
96
109
  } catch (error) {
97
- // 404 is fine, it means server is running but directory is empty
110
+ // 404 is fine, it means server is running but directory is empty
98
111
  if (error.response?.status !== 404) {
99
112
  throw new Error('Server not ready');
100
113
  }
@@ -102,52 +115,64 @@ test.before(async t => {
102
115
  });
103
116
 
104
117
  // Configuration Tests
105
- test('should have valid server configuration', t => {
118
+ test('should have valid server configuration', (t) => {
106
119
  t.truthy(port, 'Port should be defined');
107
120
  t.truthy(publicFolder, 'Public folder should be defined');
108
121
  t.truthy(ipAddress, 'IP address should be defined');
109
122
  });
110
123
 
111
124
  // Parameter Validation Tests
112
- test.serial('should validate required parameters on CortexFileHandler endpoint', async t => {
113
- const response = await axios.get(`http://localhost:${port}/api/CortexFileHandler`, {
114
- validateStatus: status => true,
115
- timeout: 5000
116
- });
117
-
118
- t.is(response.status, 400, 'Should return 400 for missing parameters');
119
- t.is(
120
- response.data,
121
- 'Please pass a uri and requestId on the query string or in the request body',
122
- 'Should return proper error message'
123
- );
124
- });
125
+ test.serial(
126
+ 'should validate required parameters on CortexFileHandler endpoint',
127
+ async (t) => {
128
+ const response = await axios.get(
129
+ `http://localhost:${port}/api/CortexFileHandler`,
130
+ {
131
+ validateStatus: (status) => true,
132
+ timeout: 5000,
133
+ },
134
+ );
125
135
 
126
- test.serial('should validate required parameters on MediaFileChunker legacy endpoint', async t => {
127
- const response = await axios.get(`http://localhost:${port}/api/MediaFileChunker`, {
128
- validateStatus: status => true,
129
- timeout: 5000
130
- });
131
-
132
- t.is(response.status, 400, 'Should return 400 for missing parameters');
133
- t.is(
134
- response.data,
135
- 'Please pass a uri and requestId on the query string or in the request body',
136
- 'Should return proper error message'
137
- );
138
- });
136
+ t.is(response.status, 400, 'Should return 400 for missing parameters');
137
+ t.is(
138
+ response.data,
139
+ 'Please pass a uri and requestId on the query string or in the request body',
140
+ 'Should return proper error message',
141
+ );
142
+ },
143
+ );
144
+
145
+ test.serial(
146
+ 'should validate required parameters on MediaFileChunker legacy endpoint',
147
+ async (t) => {
148
+ const response = await axios.get(
149
+ `http://localhost:${port}/api/MediaFileChunker`,
150
+ {
151
+ validateStatus: (status) => true,
152
+ timeout: 5000,
153
+ },
154
+ );
155
+
156
+ t.is(response.status, 400, 'Should return 400 for missing parameters');
157
+ t.is(
158
+ response.data,
159
+ 'Please pass a uri and requestId on the query string or in the request body',
160
+ 'Should return proper error message',
161
+ );
162
+ },
163
+ );
139
164
 
140
165
  // Static Files Tests
141
- test.serial('should serve static files from public directory', async t => {
166
+ test.serial('should serve static files from public directory', async (t) => {
142
167
  try {
143
168
  const response = await axios.get(`http://localhost:${port}/files`, {
144
169
  timeout: 5000,
145
- validateStatus: status => status === 200 || status === 404
170
+ validateStatus: (status) => status === 200 || status === 404,
146
171
  });
147
-
172
+
148
173
  t.true(
149
174
  response.status === 200 || response.status === 404,
150
- 'Should respond with 200 or 404 for static files'
175
+ 'Should respond with 200 or 404 for static files',
151
176
  );
152
177
  } catch (error) {
153
178
  t.fail(`Failed to connect to files endpoint: ${error.message}`);
@@ -155,259 +180,356 @@ test.serial('should serve static files from public directory', async t => {
155
180
  });
156
181
 
157
182
  // Hash Operation Tests
158
- test.serial('should handle non-existent hash check', async t => {
159
- const response = await axios.get(`http://localhost:${port}/api/CortexFileHandler`, {
160
- params: {
161
- hash: 'nonexistent-hash',
162
- checkHash: true
183
+ test.serial('should handle non-existent hash check', async (t) => {
184
+ const response = await axios.get(
185
+ `http://localhost:${port}/api/CortexFileHandler`,
186
+ {
187
+ params: {
188
+ hash: 'nonexistent-hash',
189
+ checkHash: true,
190
+ },
191
+ validateStatus: (status) => true,
192
+ timeout: 5000,
163
193
  },
164
- validateStatus: status => true,
165
- timeout: 5000
166
- });
167
-
168
- t.is(response.status, 404, 'Should return 404 for non-existent hash');
169
- t.is(response.data, 'Hash nonexistent-hash not found', 'Should return proper error message');
170
- });
194
+ );
171
195
 
172
- test.serial('should handle hash clearing for non-existent hash', async t => {
173
- const response = await axios.get(`http://localhost:${port}/api/CortexFileHandler`, {
174
- params: {
175
- hash: 'nonexistent-hash',
176
- clearHash: true
177
- },
178
- validateStatus: status => true,
179
- timeout: 5000
180
- });
181
-
182
196
  t.is(response.status, 404, 'Should return 404 for non-existent hash');
183
- t.is(response.data, 'Hash nonexistent-hash not found', 'Should return proper message');
197
+ t.is(
198
+ response.data,
199
+ 'Hash nonexistent-hash not found',
200
+ 'Should return proper error message',
201
+ );
184
202
  });
185
203
 
186
- test.serial('should handle hash operations without hash parameter', async t => {
187
- const response = await axios.get(`http://localhost:${port}/api/CortexFileHandler`, {
188
- params: {
189
- checkHash: true
204
+ test.serial('should handle hash clearing for non-existent hash', async (t) => {
205
+ const response = await axios.get(
206
+ `http://localhost:${port}/api/CortexFileHandler`,
207
+ {
208
+ params: {
209
+ hash: 'nonexistent-hash',
210
+ clearHash: true,
211
+ },
212
+ validateStatus: (status) => true,
213
+ timeout: 5000,
190
214
  },
191
- validateStatus: status => true,
192
- timeout: 5000
193
- });
194
-
195
- t.is(response.status, 400, 'Should return 400 for missing hash');
215
+ );
216
+
217
+ t.is(response.status, 404, 'Should return 404 for non-existent hash');
196
218
  t.is(
197
219
  response.data,
198
- 'Please pass a uri and requestId on the query string or in the request body',
199
- 'Should return proper error message'
220
+ 'Hash nonexistent-hash not found',
221
+ 'Should return proper message',
200
222
  );
201
223
  });
202
224
 
225
+ test.serial(
226
+ 'should handle hash operations without hash parameter',
227
+ async (t) => {
228
+ const response = await axios.get(
229
+ `http://localhost:${port}/api/CortexFileHandler`,
230
+ {
231
+ params: {
232
+ checkHash: true,
233
+ },
234
+ validateStatus: (status) => true,
235
+ timeout: 5000,
236
+ },
237
+ );
238
+
239
+ t.is(response.status, 400, 'Should return 400 for missing hash');
240
+ t.is(
241
+ response.data,
242
+ 'Please pass a uri and requestId on the query string or in the request body',
243
+ 'Should return proper error message',
244
+ );
245
+ },
246
+ );
247
+
203
248
  // URL Validation Tests
204
- test.serial('should reject invalid URLs', async t => {
205
- const response = await axios.get(`http://localhost:${port}/api/CortexFileHandler`, {
206
- params: {
207
- uri: 'not-a-valid-url',
208
- requestId: 'test-request'
249
+ test.serial('should reject invalid URLs', async (t) => {
250
+ const response = await axios.get(
251
+ `http://localhost:${port}/api/CortexFileHandler`,
252
+ {
253
+ params: {
254
+ uri: 'not-a-valid-url',
255
+ requestId: 'test-request',
256
+ },
257
+ validateStatus: (status) => true,
258
+ timeout: 5000,
209
259
  },
210
- validateStatus: status => true,
211
- timeout: 5000
212
- });
213
-
260
+ );
261
+
214
262
  t.is(response.status, 500, 'Should return 500 for invalid URL');
215
- t.true(response.data.includes('Invalid URL'), 'Should indicate invalid URL in error message');
263
+ t.true(
264
+ response.data.includes('Invalid URL'),
265
+ 'Should indicate invalid URL in error message',
266
+ );
216
267
  });
217
268
 
218
- test.serial('should reject unsupported protocols', async t => {
219
- const response = await axios.get(`http://localhost:${port}/api/CortexFileHandler`, {
220
- params: {
221
- uri: 'ftp://example.com/test.mp3',
222
- requestId: 'test-request'
269
+ test.serial('should reject unsupported protocols', async (t) => {
270
+ const response = await axios.get(
271
+ `http://localhost:${port}/api/CortexFileHandler`,
272
+ {
273
+ params: {
274
+ uri: 'ftp://example.com/test.mp3',
275
+ requestId: 'test-request',
276
+ },
277
+ validateStatus: (status) => true,
278
+ timeout: 5000,
223
279
  },
224
- validateStatus: status => true,
225
- timeout: 5000
226
- });
227
-
280
+ );
281
+
228
282
  t.is(response.status, 500, 'Should return 500 for unsupported protocol');
229
283
  t.true(
230
284
  response.data.includes('Error processing media file'),
231
- 'Should indicate error processing media file'
285
+ 'Should indicate error processing media file',
232
286
  );
233
287
  });
234
288
 
235
289
  // Remote File Operation Tests
236
- test.serial('should validate remote file URL format', async t => {
237
- const response = await axios.get(`http://localhost:${port}/api/CortexFileHandler`, {
238
- params: {
239
- fetch: 'not-a-valid-url'
290
+ test.serial('should validate remote file URL format', async (t) => {
291
+ const response = await axios.get(
292
+ `http://localhost:${port}/api/CortexFileHandler`,
293
+ {
294
+ params: {
295
+ fetch: 'not-a-valid-url',
296
+ },
297
+ validateStatus: (status) => true,
298
+ timeout: 5000,
240
299
  },
241
- validateStatus: status => true,
242
- timeout: 5000
243
- });
244
-
300
+ );
301
+
245
302
  t.is(response.status, 400, 'Should return 400 for invalid remote URL');
246
- t.is(response.data, 'Invalid or inaccessible URL', 'Should return proper error message');
303
+ t.is(
304
+ response.data,
305
+ 'Invalid or inaccessible URL',
306
+ 'Should return proper error message',
307
+ );
247
308
  });
248
309
 
249
- test.serial('should handle restore operation with invalid URL', async t => {
250
- const response = await axios.get(`http://localhost:${port}/api/CortexFileHandler`, {
251
- params: {
252
- restore: 'not-a-valid-url'
310
+ test.serial('should handle restore operation with invalid URL', async (t) => {
311
+ const response = await axios.get(
312
+ `http://localhost:${port}/api/CortexFileHandler`,
313
+ {
314
+ params: {
315
+ restore: 'not-a-valid-url',
316
+ },
317
+ validateStatus: (status) => true,
318
+ timeout: 5000,
253
319
  },
254
- validateStatus: status => true,
255
- timeout: 5000
256
- });
257
-
320
+ );
321
+
258
322
  t.is(response.status, 400, 'Should return 400 for invalid restore URL');
259
- t.is(response.data, 'Invalid or inaccessible URL', 'Should return proper error message');
323
+ t.is(
324
+ response.data,
325
+ 'Invalid or inaccessible URL',
326
+ 'Should return proper error message',
327
+ );
260
328
  });
261
329
 
262
- test.serial('should handle load operation with invalid URL', async t => {
263
- const response = await axios.get(`http://localhost:${port}/api/CortexFileHandler`, {
264
- params: {
265
- load: 'not-a-valid-url'
330
+ test.serial('should handle load operation with invalid URL', async (t) => {
331
+ const response = await axios.get(
332
+ `http://localhost:${port}/api/CortexFileHandler`,
333
+ {
334
+ params: {
335
+ load: 'not-a-valid-url',
336
+ },
337
+ validateStatus: (status) => true,
338
+ timeout: 5000,
266
339
  },
267
- validateStatus: status => true,
268
- timeout: 5000
269
- });
270
-
340
+ );
341
+
271
342
  t.is(response.status, 400, 'Should return 400 for invalid load URL');
272
- t.is(response.data, 'Invalid or inaccessible URL', 'Should return proper error message');
343
+ t.is(
344
+ response.data,
345
+ 'Invalid or inaccessible URL',
346
+ 'Should return proper error message',
347
+ );
273
348
  });
274
349
 
275
350
  // Delete Operation Tests
276
- test.serial('should validate requestId for delete operation', async t => {
277
- const response = await axios.delete(`http://localhost:${port}/api/CortexFileHandler`, {
278
- validateStatus: status => true,
279
- timeout: 5000
280
- });
281
-
351
+ test.serial('should validate requestId for delete operation', async (t) => {
352
+ const response = await axios.delete(
353
+ `http://localhost:${port}/api/CortexFileHandler`,
354
+ {
355
+ validateStatus: (status) => true,
356
+ timeout: 5000,
357
+ },
358
+ );
359
+
282
360
  t.is(response.status, 400, 'Should return 400 for missing requestId');
283
361
  t.is(
284
362
  response.data,
285
363
  'Please pass a requestId on the query string',
286
- 'Should return proper error message'
364
+ 'Should return proper error message',
287
365
  );
288
366
  });
289
367
 
290
- test.serial('should handle delete with valid requestId', async t => {
368
+ test.serial('should handle delete with valid requestId', async (t) => {
291
369
  const testRequestId = 'test-delete-request';
292
370
  const testContent = 'test content';
293
371
  const form = new FormData();
294
372
  form.append('file', Buffer.from(testContent), 'test.txt');
295
-
373
+
296
374
  // Upload a file first
297
375
  const uploadResponse = await axios.post(baseUrl, form, {
298
376
  headers: form.getHeaders(),
299
- validateStatus: status => true,
300
- timeout: 5000
377
+ validateStatus: (status) => true,
378
+ timeout: 5000,
301
379
  });
302
380
  t.is(uploadResponse.status, 200, 'Upload should succeed');
303
-
381
+
304
382
  // Extract the folder name from the URL
305
383
  const url = uploadResponse.data.url;
306
384
  const folderName = getFolderNameFromUrl(url);
307
-
385
+
308
386
  // Delete the file
309
- const deleteResponse = await axios.delete(`${baseUrl}?operation=delete&requestId=${folderName}`);
387
+ const deleteResponse = await axios.delete(
388
+ `${baseUrl}?operation=delete&requestId=${folderName}`,
389
+ );
310
390
  t.is(deleteResponse.status, 200, 'Delete should succeed');
311
- t.true(Array.isArray(deleteResponse.data.body), 'Response should be an array of deleted files');
312
- t.true(deleteResponse.data.body.length > 0, 'Should have deleted at least one file');
313
- t.true(deleteResponse.data.body[0].includes(folderName), 'Deleted file should contain folder name');
391
+ t.true(
392
+ Array.isArray(deleteResponse.data.body),
393
+ 'Response should be an array of deleted files',
394
+ );
395
+ t.true(
396
+ deleteResponse.data.body.length > 0,
397
+ 'Should have deleted at least one file',
398
+ );
399
+ t.true(
400
+ deleteResponse.data.body[0].includes(folderName),
401
+ 'Deleted file should contain folder name',
402
+ );
314
403
  });
315
404
 
316
- test.serial('should handle delete with non-existent requestId', async t => {
317
- const response = await axios.delete(`http://localhost:${port}/api/CortexFileHandler`, {
318
- params: {
319
- requestId: 'nonexistent-request'
405
+ test.serial('should handle delete with non-existent requestId', async (t) => {
406
+ const response = await axios.delete(
407
+ `http://localhost:${port}/api/CortexFileHandler`,
408
+ {
409
+ params: {
410
+ requestId: 'nonexistent-request',
411
+ },
412
+ validateStatus: (status) => true,
413
+ timeout: 30000,
320
414
  },
321
- validateStatus: status => true,
322
- timeout: 30000
323
- });
324
-
325
- t.is(response.status, 200, 'Should return 200 even for non-existent requestId');
326
- t.deepEqual(response.data.body, [], 'Should return empty array for non-existent requestId');
415
+ );
416
+
417
+ t.is(
418
+ response.status,
419
+ 200,
420
+ 'Should return 200 even for non-existent requestId',
421
+ );
422
+ t.deepEqual(
423
+ response.data.body,
424
+ [],
425
+ 'Should return empty array for non-existent requestId',
426
+ );
327
427
  });
328
428
 
329
- test('should handle delete with invalid requestId', async t => {
330
- const response = await axios.get(`http://localhost:${port}/api/CortexFileHandler`, {
331
- params: {
332
- requestId: 'nonexistent-request',
333
- operation: 'delete'
429
+ test('should handle delete with invalid requestId', async (t) => {
430
+ const response = await axios.get(
431
+ `http://localhost:${port}/api/CortexFileHandler`,
432
+ {
433
+ params: {
434
+ requestId: 'nonexistent-request',
435
+ operation: 'delete',
436
+ },
437
+ timeout: 5000,
334
438
  },
335
- timeout: 5000
336
- });
337
- t.is(response.status, 200, 'Should return 200 for delete with invalid requestId');
439
+ );
440
+ t.is(
441
+ response.status,
442
+ 200,
443
+ 'Should return 200 for delete with invalid requestId',
444
+ );
338
445
  t.true(Array.isArray(response.data.body), 'Response should be an array');
339
- t.is(response.data.body.length, 0, 'Response should be empty array for non-existent requestId');
446
+ t.is(
447
+ response.data.body.length,
448
+ 0,
449
+ 'Response should be empty array for non-existent requestId',
450
+ );
340
451
  });
341
452
 
342
453
  // POST Operation Tests
343
- test('should handle empty POST request', async t => {
454
+ test('should handle empty POST request', async (t) => {
344
455
  const form = new FormData();
345
456
  try {
346
- await axios.post(
347
- `http://localhost:${port}/api/CortexFileHandler`,
348
- form,
349
- {
350
- headers: form.getHeaders(),
351
- timeout: 5000
352
- }
353
- );
457
+ await axios.post(`http://localhost:${port}/api/CortexFileHandler`, form, {
458
+ headers: form.getHeaders(),
459
+ timeout: 5000,
460
+ });
354
461
  t.fail('Should have thrown error');
355
462
  } catch (error) {
356
- t.is(error.response.status, 400, 'Should return 400 for empty POST request');
357
- t.is(error.response.data, 'No file provided in request', 'Should return proper error message');
463
+ t.is(
464
+ error.response.status,
465
+ 400,
466
+ 'Should return 400 for empty POST request',
467
+ );
468
+ t.is(
469
+ error.response.data,
470
+ 'No file provided in request',
471
+ 'Should return proper error message',
472
+ );
358
473
  }
359
474
  });
360
475
 
361
476
  // Upload Tests
362
- test.serial('should handle successful file upload with hash', async t => {
477
+ test.serial('should handle successful file upload with hash', async (t) => {
363
478
  const form = new FormData();
364
479
  const testHash = 'test-hash-123';
365
480
  const testContent = 'test content';
366
481
  form.append('file', Buffer.from(testContent), 'test.txt');
367
482
  form.append('hash', testHash);
368
-
369
- // Upload file with hash
370
- const uploadResponse = await axios.post(
371
- `http://localhost:${port}/api/CortexFileHandler`,
372
- form,
373
- {
374
- headers: {
375
- ...form.getHeaders(),
376
- 'Content-Type': 'multipart/form-data'
483
+
484
+ let uploadedUrl;
485
+ try {
486
+ // Upload file with hash
487
+ const uploadResponse = await axios.post(
488
+ `http://localhost:${port}/api/CortexFileHandler`,
489
+ form,
490
+ {
491
+ headers: {
492
+ ...form.getHeaders(),
493
+ 'Content-Type': 'multipart/form-data',
494
+ },
495
+ validateStatus: (status) => true,
496
+ timeout: 5000,
377
497
  },
378
- validateStatus: status => true,
379
- timeout: 5000
380
- }
381
- );
382
-
383
- t.is(uploadResponse.status, 200, 'Upload should succeed');
384
- t.truthy(uploadResponse.data.url, 'Response should contain file URL');
385
-
386
- // Wait a bit for Redis to be updated
387
- await new Promise(resolve => setTimeout(resolve, 1000));
388
-
389
- // Verify hash exists and returns the file info
390
- const hashCheckResponse = await axios.get(`http://localhost:${port}/api/CortexFileHandler`, {
391
- params: {
392
- hash: testHash,
393
- checkHash: true
394
- },
395
- validateStatus: status => true,
396
- timeout: 5000
397
- });
398
-
399
- t.is(hashCheckResponse.status, 404, 'Hash check should return 404 for new hash');
400
- t.is(hashCheckResponse.data, `Hash ${testHash} not found`, 'Should indicate hash not found');
498
+ );
401
499
 
402
- await cleanupUploadedFile(t, uploadResponse.data.url);
500
+ t.is(uploadResponse.status, 200, 'Upload should succeed');
501
+ t.truthy(uploadResponse.data.url, 'Response should contain file URL');
502
+ uploadedUrl = uploadResponse.data.url;
503
+
504
+ // Wait a bit for Redis to be updated
505
+ await new Promise((resolve) => setTimeout(resolve, 1000));
506
+
507
+ // Verify hash exists and returns the file info
508
+ const hashCheckResponse = await axios.get(
509
+ `http://localhost:${port}/api/CortexFileHandler`,
510
+ {
511
+ params: {
512
+ hash: testHash,
513
+ checkHash: true,
514
+ },
515
+ validateStatus: (status) => true,
516
+ timeout: 5000,
517
+ },
518
+ );
519
+
520
+ t.is(hashCheckResponse.status, 200, 'Hash check should return 200 for uploaded hash');
521
+ t.truthy(hashCheckResponse.data.url, 'Hash check should return file URL');
522
+ } finally {
523
+ await cleanupHashAndFile(testHash, uploadedUrl, baseUrl);
524
+ }
403
525
  });
404
526
 
405
- test.serial('should handle hash clearing', async t => {
527
+ test.serial('should handle hash clearing', async (t) => {
406
528
  const testHash = 'test-hash-to-clear';
407
529
  const form = new FormData();
408
530
  form.append('file', Buffer.from('test content'), 'test.txt');
409
531
  form.append('hash', testHash);
410
-
532
+
411
533
  // First upload a file with the hash
412
534
  const uploadResponse = await axios.post(
413
535
  `http://localhost:${port}/api/CortexFileHandler`,
@@ -415,171 +537,208 @@ test.serial('should handle hash clearing', async t => {
415
537
  {
416
538
  headers: {
417
539
  ...form.getHeaders(),
418
- 'Content-Type': 'multipart/form-data'
540
+ 'Content-Type': 'multipart/form-data',
419
541
  },
420
- validateStatus: status => true,
421
- timeout: 5000
422
- }
542
+ validateStatus: (status) => true,
543
+ timeout: 5000,
544
+ },
423
545
  );
424
-
546
+
425
547
  t.is(uploadResponse.status, 200, 'Upload should succeed');
426
548
  t.truthy(uploadResponse.data.url, 'Response should contain file URL');
427
-
549
+
428
550
  // Wait a bit for Redis to be updated
429
- await new Promise(resolve => setTimeout(resolve, 1000));
430
-
431
- // Clear the hash
432
- const clearResponse = await axios.get(`http://localhost:${port}/api/CortexFileHandler`, {
433
- params: {
434
- hash: testHash,
435
- clearHash: true
551
+ await new Promise((resolve) => setTimeout(resolve, 1000));
552
+
553
+ // Clear the hash (should succeed)
554
+ const clearResponse = await axios.get(
555
+ `http://localhost:${port}/api/CortexFileHandler`,
556
+ {
557
+ params: {
558
+ hash: testHash,
559
+ clearHash: true,
560
+ },
561
+ validateStatus: (status) => true,
562
+ timeout: 5000,
436
563
  },
437
- validateStatus: status => true,
438
- timeout: 5000
439
- });
440
-
441
- t.is(clearResponse.status, 404, 'Hash clearing should return 404 for new hash');
442
- t.is(clearResponse.data, `Hash ${testHash} not found`, 'Should indicate hash not found');
443
-
564
+ );
565
+
566
+ t.is(clearResponse.status, 200, 'Hash clearing should return 200 for existing hash');
567
+ t.is(clearResponse.data, `Hash ${testHash} removed`, 'Should indicate hash was removed');
568
+
569
+ // Second clear (should return 404)
570
+ const clearAgainResponse = await axios.get(
571
+ `http://localhost:${port}/api/CortexFileHandler`,
572
+ {
573
+ params: {
574
+ hash: testHash,
575
+ clearHash: true,
576
+ },
577
+ validateStatus: (status) => true,
578
+ timeout: 5000,
579
+ },
580
+ );
581
+ t.is(clearAgainResponse.status, 404, 'Hash clearing should return 404 for already removed hash');
582
+ t.is(clearAgainResponse.data, `Hash ${testHash} not found`, 'Should indicate hash not found');
583
+
444
584
  // Verify hash no longer exists
445
- const verifyResponse = await axios.get(`http://localhost:${port}/api/CortexFileHandler`, {
446
- params: {
447
- hash: testHash,
448
- checkHash: true
585
+ const verifyResponse = await axios.get(
586
+ `http://localhost:${port}/api/CortexFileHandler`,
587
+ {
588
+ params: {
589
+ hash: testHash,
590
+ checkHash: true,
591
+ },
592
+ validateStatus: (status) => true,
593
+ timeout: 5000,
449
594
  },
450
- validateStatus: status => true,
451
- timeout: 5000
452
- });
453
-
595
+ );
596
+
454
597
  t.is(verifyResponse.status, 404, 'Hash should not exist');
455
- t.is(verifyResponse.data, `Hash ${testHash} not found`, 'Should indicate hash not found');
598
+ t.is(
599
+ verifyResponse.data,
600
+ `Hash ${testHash} not found`,
601
+ 'Should indicate hash not found',
602
+ );
456
603
 
457
604
  // Clean up the uploaded file
458
605
  await cleanupUploadedFile(t, uploadResponse.data.url);
459
606
  });
460
607
 
461
- test.serial('should handle file upload without hash', async t => {
608
+ test.serial('should handle file upload without hash', async (t) => {
462
609
  const form = new FormData();
463
610
  form.append('file', Buffer.from('test content'), 'test.txt');
464
-
611
+
465
612
  const response = await axios.post(
466
613
  `http://localhost:${port}/api/CortexFileHandler`,
467
614
  form,
468
615
  {
469
616
  headers: {
470
617
  ...form.getHeaders(),
471
- 'Content-Type': 'multipart/form-data'
618
+ 'Content-Type': 'multipart/form-data',
472
619
  },
473
- validateStatus: status => true,
474
- timeout: 5000
475
- }
620
+ validateStatus: (status) => true,
621
+ timeout: 5000,
622
+ },
476
623
  );
477
-
624
+
478
625
  t.is(response.status, 200, 'Upload should succeed');
479
626
  t.truthy(response.data.url, 'Response should contain file URL');
480
627
 
481
628
  await cleanupUploadedFile(t, response.data.url);
482
629
  });
483
630
 
484
- test.serial('should handle upload with empty file', async t => {
631
+ test.serial('should handle upload with empty file', async (t) => {
485
632
  const form = new FormData();
486
633
  // Empty file
487
634
  form.append('file', Buffer.from(''), 'empty.txt');
488
-
635
+
489
636
  const response = await axios.post(
490
637
  `http://localhost:${port}/api/CortexFileHandler`,
491
638
  form,
492
639
  {
493
640
  headers: {
494
641
  ...form.getHeaders(),
495
- 'Content-Type': 'multipart/form-data'
642
+ 'Content-Type': 'multipart/form-data',
496
643
  },
497
- validateStatus: status => true,
498
- timeout: 5000
499
- }
644
+ validateStatus: (status) => true,
645
+ timeout: 5000,
646
+ },
500
647
  );
501
-
502
- t.is(response.status, 200, 'Should accept empty file');
503
- t.truthy(response.data.url, 'Should return URL for empty file');
504
-
505
- await cleanupUploadedFile(t, response.data.url);
506
- });
507
648
 
508
- test.serial('should handle complete upload-request-delete-verify sequence', async t => {
509
- const testContent = 'test content for sequence';
510
- const testHash = 'test-sequence-hash';
511
- const form = new FormData();
512
- form.append('file', Buffer.from(testContent), 'sequence-test.txt');
513
- form.append('hash', testHash);
514
-
515
- // Upload file with hash
516
- const uploadResponse = await axios.post(baseUrl, form, {
517
- headers: form.getHeaders(),
518
- validateStatus: status => true,
519
- timeout: 5000
520
- });
521
- t.is(uploadResponse.status, 200, 'Upload should succeed');
522
- t.truthy(uploadResponse.data.url, 'Response should contain URL');
523
-
524
- await cleanupUploadedFile(t, uploadResponse.data.url);
525
-
526
- // Verify hash is gone by trying to get the file URL
527
- const hashCheckResponse = await axios.get(`${baseUrl}`, {
528
- params: {
529
- hash: testHash,
530
- checkHash: true
531
- },
532
- validateStatus: status => true
533
- });
534
- t.is(hashCheckResponse.status, 404, 'Hash should not exist after deletion');
649
+ t.is(response.status, 400, 'Should reject empty file');
650
+ t.is(response.data, 'Invalid file: file is empty', 'Should return proper error message');
535
651
  });
536
652
 
537
- test.serial('should handle multiple file uploads with unique hashes', async t => {
538
- const uploadedFiles = [];
539
-
540
- // Upload 10 files
541
- for (let i = 0; i < 10; i++) {
542
- const content = `test content for file ${i}`;
653
+ test.serial(
654
+ 'should handle complete upload-request-delete-verify sequence',
655
+ async (t) => {
656
+ const testContent = 'test content for sequence';
657
+ const testHash = 'test-sequence-hash';
543
658
  const form = new FormData();
544
- form.append('file', Buffer.from(content), `file-${i}.txt`);
545
-
659
+ form.append('file', Buffer.from(testContent), 'sequence-test.txt');
660
+ form.append('hash', testHash);
661
+
662
+ // Upload file with hash
546
663
  const uploadResponse = await axios.post(baseUrl, form, {
547
664
  headers: form.getHeaders(),
548
- validateStatus: status => true,
549
- timeout: 5000
550
- });
551
- t.is(uploadResponse.status, 200, `Upload should succeed for file ${i}`);
552
-
553
- const url = uploadResponse.data.url;
554
- t.truthy(url, `Response should contain URL for file ${i}`);
555
-
556
- uploadedFiles.push({
557
- url: convertToLocalUrl(url),
558
- content
665
+ validateStatus: (status) => true,
666
+ timeout: 5000,
559
667
  });
560
-
561
- // Small delay between uploads
562
- await new Promise(resolve => setTimeout(resolve, 100));
563
- }
564
-
565
- // Verify files are stored and can be fetched
566
- for (const file of uploadedFiles) {
567
- const fileResponse = await axios.get(file.url, {
568
- validateStatus: status => true,
569
- timeout: 5000
668
+ t.is(uploadResponse.status, 200, 'Upload should succeed');
669
+ t.truthy(uploadResponse.data.url, 'Response should contain URL');
670
+
671
+ await cleanupUploadedFile(t, uploadResponse.data.url);
672
+
673
+ // Verify hash is gone by trying to get the file URL
674
+ const hashCheckResponse = await axios.get(`${baseUrl}`, {
675
+ params: {
676
+ hash: testHash,
677
+ checkHash: true,
678
+ },
679
+ validateStatus: (status) => true,
570
680
  });
571
- t.is(fileResponse.status, 200, `File should be accessible at ${file.url}`);
572
- t.is(fileResponse.data, file.content, `File content should match original content`);
573
- }
574
-
575
- // Clean up all files
576
- for (const file of uploadedFiles) {
577
- await cleanupUploadedFile(t, file.url);
578
- }
579
- });
681
+ t.is(hashCheckResponse.status, 404, 'Hash should not exist after deletion');
682
+ },
683
+ );
684
+
685
+ test.serial(
686
+ 'should handle multiple file uploads with unique hashes',
687
+ async (t) => {
688
+ const uploadedFiles = [];
689
+
690
+ // Upload 10 files
691
+ for (let i = 0; i < 10; i++) {
692
+ const content = `test content for file ${i}`;
693
+ const form = new FormData();
694
+ form.append('file', Buffer.from(content), `file-${i}.txt`);
695
+
696
+ const uploadResponse = await axios.post(baseUrl, form, {
697
+ headers: form.getHeaders(),
698
+ validateStatus: (status) => true,
699
+ timeout: 5000,
700
+ });
701
+ t.is(uploadResponse.status, 200, `Upload should succeed for file ${i}`);
702
+
703
+ const url = uploadResponse.data.url;
704
+ t.truthy(url, `Response should contain URL for file ${i}`);
705
+
706
+ uploadedFiles.push({
707
+ url: convertToLocalUrl(url),
708
+ content,
709
+ });
710
+
711
+ // Small delay between uploads
712
+ await new Promise((resolve) => setTimeout(resolve, 100));
713
+ }
714
+
715
+ // Verify files are stored and can be fetched
716
+ for (const file of uploadedFiles) {
717
+ const fileResponse = await axios.get(file.url, {
718
+ validateStatus: (status) => true,
719
+ timeout: 5000,
720
+ });
721
+ t.is(
722
+ fileResponse.status,
723
+ 200,
724
+ `File should be accessible at ${file.url}`,
725
+ );
726
+ t.is(
727
+ fileResponse.data,
728
+ file.content,
729
+ 'File content should match original content',
730
+ );
731
+ }
732
+
733
+ // Clean up all files
734
+ for (const file of uploadedFiles) {
735
+ await cleanupUploadedFile(t, file.url);
736
+ }
737
+ },
738
+ );
580
739
 
581
740
  // Example of a hash-specific test that only runs with Azure
582
- test.serial('should handle hash reuse with Azure storage', async t => {
741
+ test.serial('should handle hash reuse with Azure storage', async (t) => {
583
742
  if (!isAzureConfigured()) {
584
743
  t.pass('Skipping hash test - Azure not configured');
585
744
  return;
@@ -590,62 +749,84 @@ test.serial('should handle hash reuse with Azure storage', async t => {
590
749
  const form = new FormData();
591
750
  form.append('file', Buffer.from(testContent), 'test.txt');
592
751
  form.append('hash', testHash);
593
-
752
+
594
753
  // First upload
595
754
  const upload1 = await axios.post(baseUrl, form, {
596
755
  headers: form.getHeaders(),
597
- validateStatus: status => true,
598
- timeout: 5000
756
+ validateStatus: (status) => true,
757
+ timeout: 5000,
599
758
  });
600
759
  t.is(upload1.status, 200, 'First upload should succeed');
601
760
  const originalUrl = upload1.data.url;
602
-
761
+
603
762
  // Check hash exists and returns the correct URL
604
- const hashCheck1 = await axios.get(baseUrl, { hash: testHash, checkHash: true }, {
605
- validateStatus: status => true
606
- });
763
+ const hashCheck1 = await axios.get(
764
+ baseUrl,
765
+ { hash: testHash, checkHash: true },
766
+ {
767
+ validateStatus: (status) => true,
768
+ },
769
+ );
607
770
  t.is(hashCheck1.status, 200, 'Hash should exist after first upload');
608
771
  t.truthy(hashCheck1.data.url, 'Hash check should return URL');
609
- t.is(hashCheck1.data.url, originalUrl, 'Hash check should return original upload URL');
610
-
772
+ t.is(
773
+ hashCheck1.data.url,
774
+ originalUrl,
775
+ 'Hash check should return original upload URL',
776
+ );
777
+
611
778
  // Verify file is accessible via URL from hash check
612
779
  const fileResponse = await axios.get(convertToLocalUrl(hashCheck1.data.url), {
613
- validateStatus: status => true,
614
- timeout: 5000
780
+ validateStatus: (status) => true,
781
+ timeout: 5000,
615
782
  });
616
783
  t.is(fileResponse.status, 200, 'File should be accessible');
617
784
  t.is(fileResponse.data, testContent, 'File content should match original');
618
-
785
+
619
786
  // Second upload with same hash
620
787
  const upload2 = await axios.post(baseUrl, form, {
621
788
  headers: form.getHeaders(),
622
- validateStatus: status => true,
623
- timeout: 5000
789
+ validateStatus: (status) => true,
790
+ timeout: 5000,
624
791
  });
625
792
  t.is(upload2.status, 200, 'Second upload should succeed');
626
793
  t.is(upload2.data.url, originalUrl, 'URLs should match for same hash');
627
-
794
+
628
795
  // Verify file is still accessible after second upload
629
796
  const fileResponse2 = await axios.get(convertToLocalUrl(upload2.data.url), {
630
- validateStatus: status => true,
631
- timeout: 5000
797
+ validateStatus: (status) => true,
798
+ timeout: 5000,
632
799
  });
633
800
  t.is(fileResponse2.status, 200, 'File should still be accessible');
634
- t.is(fileResponse2.data, testContent, 'File content should still match original');
635
-
801
+ t.is(
802
+ fileResponse2.data,
803
+ testContent,
804
+ 'File content should still match original',
805
+ );
806
+
636
807
  // Clean up
637
808
  await cleanupUploadedFile(t, originalUrl);
638
-
809
+
639
810
  // Verify hash is now gone
640
- const hashCheckAfterDelete = await axios.get(baseUrl, { hash: testHash, checkHash: true }, {
641
- validateStatus: status => true
642
- });
643
- t.is(hashCheckAfterDelete.status, 404, 'Hash should be gone after file deletion');
811
+ const hashCheckAfterDelete = await axios.get(
812
+ baseUrl,
813
+ { hash: testHash, checkHash: true },
814
+ {
815
+ validateStatus: (status) => true,
816
+ },
817
+ );
818
+ t.is(
819
+ hashCheckAfterDelete.status,
820
+ 404,
821
+ 'Hash should be gone after file deletion',
822
+ );
644
823
  });
645
824
 
646
825
  // Helper to check if GCS is configured
647
826
  function isGCSConfigured() {
648
- return process.env.GCP_SERVICE_ACCOUNT_KEY && process.env.STORAGE_EMULATOR_HOST;
827
+ return (
828
+ process.env.GCP_SERVICE_ACCOUNT_KEY && process.env.STORAGE_EMULATOR_HOST
829
+ );
649
830
  }
650
831
 
651
832
  // Helper function to check if file exists in fake GCS
@@ -653,28 +834,37 @@ async function checkGCSFile(gcsUrl) {
653
834
  // Convert gs:// URL to bucket and object path
654
835
  const [, , bucket, ...objectParts] = gcsUrl.split('/');
655
836
  const object = objectParts.join('/');
656
-
837
+
657
838
  // Query fake-gcs-server
658
- const response = await axios.get(`http://localhost:4443/storage/v1/b/${bucket}/o/${encodeURIComponent(object)}`, {
659
- validateStatus: status => true
660
- });
839
+ const response = await axios.get(
840
+ `http://localhost:4443/storage/v1/b/${bucket}/o/${encodeURIComponent(object)}`,
841
+ {
842
+ validateStatus: (status) => true,
843
+ },
844
+ );
661
845
  return response.status === 200;
662
846
  }
663
847
 
664
848
  // Helper function to verify file exists in both storages
665
849
  async function verifyFileInBothStorages(t, uploadResponse) {
666
850
  // Verify Azure URL is accessible
667
- const azureResponse = await axios.get(convertToLocalUrl(uploadResponse.data.url), {
668
- validateStatus: status => true,
669
- timeout: 5000
670
- });
851
+ const azureResponse = await axios.get(
852
+ convertToLocalUrl(uploadResponse.data.url),
853
+ {
854
+ validateStatus: (status) => true,
855
+ timeout: 5000,
856
+ },
857
+ );
671
858
  t.is(azureResponse.status, 200, 'File should be accessible in Azure');
672
859
 
673
860
  if (isGCSConfigured()) {
674
- // Verify GCS URL exists and is in correct format
861
+ // Verify GCS URL exists and is in correct format
675
862
  t.truthy(uploadResponse.data.gcs, 'Response should contain GCS URL');
676
- t.true(uploadResponse.data.gcs.startsWith('gs://'), 'GCS URL should use gs:// protocol');
677
-
863
+ t.true(
864
+ uploadResponse.data.gcs.startsWith('gs://'),
865
+ 'GCS URL should use gs:// protocol',
866
+ );
867
+
678
868
  // Check if file exists in fake GCS
679
869
  const exists = await checkGCSFile(uploadResponse.data.gcs);
680
870
  t.true(exists, 'File should exist in GCS');
@@ -684,53 +874,67 @@ async function verifyFileInBothStorages(t, uploadResponse) {
684
874
  // Helper function to verify file is deleted from both storages
685
875
  async function verifyFileDeletedFromBothStorages(t, uploadResponse) {
686
876
  // Verify Azure URL is no longer accessible
687
- const azureResponse = await axios.get(convertToLocalUrl(uploadResponse.data.url), {
688
- validateStatus: status => true,
689
- timeout: 5000
690
- });
877
+ const azureResponse = await axios.get(
878
+ convertToLocalUrl(uploadResponse.data.url),
879
+ {
880
+ validateStatus: (status) => true,
881
+ timeout: 5000,
882
+ },
883
+ );
691
884
  t.is(azureResponse.status, 404, 'File should not be accessible in Azure');
692
885
 
693
886
  if (isGCSConfigured()) {
694
- // Verify file is also deleted from GCS
887
+ // Verify file is also deleted from GCS
695
888
  const exists = await checkGCSFile(uploadResponse.data.gcs);
696
889
  t.false(exists, 'File should not exist in GCS');
697
890
  }
698
891
  }
699
892
 
700
- test.serial('should handle dual storage upload and cleanup when GCS configured', async t => {
701
- if (!isGCSConfigured()) {
702
- t.pass('Skipping test - GCS not configured');
703
- return;
704
- }
893
+ test.serial(
894
+ 'should handle dual storage upload and cleanup when GCS configured',
895
+ async (t) => {
896
+ if (!isGCSConfigured()) {
897
+ t.pass('Skipping test - GCS not configured');
898
+ return;
899
+ }
705
900
 
706
- const requestId = uuidv4();
707
- const testContent = 'test content for dual storage';
708
- const form = new FormData();
709
- form.append('file', Buffer.from(testContent), 'dual-test.txt');
710
- form.append('requestId', requestId);
901
+ const requestId = uuidv4();
902
+ const testContent = 'test content for dual storage';
903
+ const form = new FormData();
904
+ form.append('file', Buffer.from(testContent), 'dual-test.txt');
905
+ form.append('requestId', requestId);
711
906
 
712
- // Upload file
713
- const uploadResponse = await uploadFile(Buffer.from(testContent), requestId);
714
- t.is(uploadResponse.status, 200, 'Upload should succeed');
715
- t.truthy(uploadResponse.data.url, 'Response should contain Azure URL');
716
- t.truthy(uploadResponse.data.gcs, 'Response should contain GCS URL');
717
- t.true(uploadResponse.data.gcs.startsWith('gs://'), 'GCS URL should use gs:// protocol');
907
+ // Upload file
908
+ const uploadResponse = await uploadFile(
909
+ Buffer.from(testContent),
910
+ requestId,
911
+ );
912
+ t.is(uploadResponse.status, 200, 'Upload should succeed');
913
+ t.truthy(uploadResponse.data.url, 'Response should contain Azure URL');
914
+ t.truthy(uploadResponse.data.gcs, 'Response should contain GCS URL');
915
+ t.true(
916
+ uploadResponse.data.gcs.startsWith('gs://'),
917
+ 'GCS URL should use gs:// protocol',
918
+ );
718
919
 
719
- // Verify file exists in both storages
720
- await verifyFileInBothStorages(t, uploadResponse);
920
+ // Verify file exists in both storages
921
+ await verifyFileInBothStorages(t, uploadResponse);
721
922
 
722
- // Get the folder name (requestId) from the URL
723
- const fileRequestId = getFolderNameFromUrl(uploadResponse.data.url);
923
+ // Get the folder name (requestId) from the URL
924
+ const fileRequestId = getFolderNameFromUrl(uploadResponse.data.url);
724
925
 
725
- // Delete file using the correct requestId
726
- const deleteResponse = await axios.delete(`${baseUrl}?operation=delete&requestId=${fileRequestId}`);
727
- t.is(deleteResponse.status, 200, 'Delete should succeed');
926
+ // Delete file using the correct requestId
927
+ const deleteResponse = await axios.delete(
928
+ `${baseUrl}?operation=delete&requestId=${fileRequestId}`,
929
+ );
930
+ t.is(deleteResponse.status, 200, 'Delete should succeed');
728
931
 
729
- // Verify file is deleted from both storages
730
- await verifyFileDeletedFromBothStorages(t, uploadResponse);
731
- });
932
+ // Verify file is deleted from both storages
933
+ await verifyFileDeletedFromBothStorages(t, uploadResponse);
934
+ },
935
+ );
732
936
 
733
- test.serial('should handle GCS URL format and accessibility', async t => {
937
+ test.serial('should handle GCS URL format and accessibility', async (t) => {
734
938
  if (!isGCSConfigured()) {
735
939
  t.pass('Skipping test - GCS not configured');
736
940
  return;
@@ -742,18 +946,25 @@ test.serial('should handle GCS URL format and accessibility', async t => {
742
946
  form.append('file', Buffer.from(testContent), 'gcs-url-test.txt');
743
947
 
744
948
  // Upload with explicit GCS preference
745
- const uploadResponse = await axios.post(`http://localhost:${port}/api/CortexFileHandler`, form, {
746
- params: {
747
- operation: 'upload',
748
- requestId,
749
- useGCS: true
949
+ const uploadResponse = await axios.post(
950
+ `http://localhost:${port}/api/CortexFileHandler`,
951
+ form,
952
+ {
953
+ params: {
954
+ operation: 'upload',
955
+ requestId,
956
+ useGCS: true,
957
+ },
958
+ headers: form.getHeaders(),
750
959
  },
751
- headers: form.getHeaders()
752
- });
960
+ );
753
961
 
754
962
  t.is(uploadResponse.status, 200, 'Upload should succeed');
755
963
  t.truthy(uploadResponse.data.gcs, 'Response should contain GCS URL');
756
- t.true(uploadResponse.data.gcs.startsWith('gs://'), 'GCS URL should use gs:// protocol');
964
+ t.true(
965
+ uploadResponse.data.gcs.startsWith('gs://'),
966
+ 'GCS URL should use gs:// protocol',
967
+ );
757
968
 
758
969
  // Verify content is accessible via normal URL since we can't directly access gs:// URLs
759
970
  const fileResponse = await axios.get(uploadResponse.data.url);
@@ -773,22 +984,25 @@ async function createAndUploadTestFile() {
773
984
 
774
985
  // Generate a real MP3 file using ffmpeg
775
986
  try {
776
- execSync(`ffmpeg -f lavfi -i anullsrc=r=44100:cl=mono -t 10 -q:a 9 -acodec libmp3lame "${tempFile}"`, {
777
- stdio: ['ignore', 'pipe', 'pipe']
778
- });
987
+ execSync(
988
+ `ffmpeg -f lavfi -i anullsrc=r=44100:cl=mono -t 10 -q:a 9 -acodec libmp3lame "${tempFile}"`,
989
+ {
990
+ stdio: ['ignore', 'pipe', 'pipe'],
991
+ },
992
+ );
779
993
 
780
994
  // Upload the real media file
781
995
  const form = new FormData();
782
996
  form.append('file', fs.createReadStream(tempFile));
783
-
997
+
784
998
  const uploadResponse = await axios.post(baseUrl, form, {
785
999
  headers: form.getHeaders(),
786
- validateStatus: status => true,
787
- timeout: 5000
1000
+ validateStatus: (status) => true,
1001
+ timeout: 5000,
788
1002
  });
789
1003
 
790
1004
  // Wait a short time to ensure file is available
791
- await new Promise(resolve => setTimeout(resolve, 1000));
1005
+ await new Promise((resolve) => setTimeout(resolve, 1000));
792
1006
 
793
1007
  // Clean up temp file
794
1008
  fs.rmSync(tempDir, { recursive: true, force: true });
@@ -800,69 +1014,91 @@ async function createAndUploadTestFile() {
800
1014
  }
801
1015
  }
802
1016
 
803
- test.serial('should handle chunking with GCS integration when configured', async t => {
804
- if (!isGCSConfigured()) {
805
- t.pass('Skipping test - GCS not configured');
806
- return;
807
- }
808
-
809
- // Create a large test file first
810
- const testFileUrl = await createAndUploadTestFile();
811
- const requestId = uuidv4();
1017
+ test.serial(
1018
+ 'should handle chunking with GCS integration when configured',
1019
+ async (t) => {
1020
+ if (!isGCSConfigured()) {
1021
+ t.pass('Skipping test - GCS not configured');
1022
+ return;
1023
+ }
812
1024
 
813
- // Request chunking via GET
814
- const chunkResponse = await axios.get(baseUrl, {
815
- params: {
816
- uri: testFileUrl,
817
- requestId
818
- },
819
- validateStatus: status => true,
820
- timeout: 5000
821
- });
1025
+ // Create a large test file first
1026
+ const testFileUrl = await createAndUploadTestFile();
1027
+ const requestId = uuidv4();
822
1028
 
823
- t.is(chunkResponse.status, 200, 'Chunked request should succeed');
824
- t.truthy(chunkResponse.data, 'Response should contain data');
825
- t.true(Array.isArray(chunkResponse.data), 'Response should be an array');
826
- t.true(chunkResponse.data.length > 0, 'Should have created at least one chunk');
827
-
828
- // Verify each chunk exists in both Azure/Local and GCS
829
- for (const chunk of chunkResponse.data) {
830
- // Verify Azure/Local URL is accessible
831
- const azureResponse = await axios.get(convertToLocalUrl(chunk.uri), {
832
- validateStatus: status => true,
833
- timeout: 5000
1029
+ // Request chunking via GET
1030
+ const chunkResponse = await axios.get(baseUrl, {
1031
+ params: {
1032
+ uri: testFileUrl,
1033
+ requestId,
1034
+ },
1035
+ validateStatus: (status) => true,
1036
+ timeout: 5000,
834
1037
  });
835
- t.is(azureResponse.status, 200, `Chunk should be accessible in Azure/Local: ${chunk.uri}`);
836
-
837
- // Verify GCS URL exists and is in correct format
838
- t.truthy(chunk.gcs, 'Chunk should contain GCS URL');
839
- t.true(chunk.gcs.startsWith('gs://'), 'GCS URL should use gs:// protocol');
840
-
841
- // Check if chunk exists in fake GCS
842
- const exists = await checkGCSFile(chunk.gcs);
843
- t.true(exists, `Chunk should exist in GCS: ${chunk.gcs}`);
844
- }
845
1038
 
846
- // Clean up chunks
847
- const deleteResponse = await axios.delete(`${baseUrl}?operation=delete&requestId=${requestId}`);
848
- t.is(deleteResponse.status, 200, 'Delete should succeed');
1039
+ t.is(chunkResponse.status, 200, 'Chunked request should succeed');
1040
+ t.truthy(chunkResponse.data, 'Response should contain data');
1041
+ t.true(Array.isArray(chunkResponse.data), 'Response should be an array');
1042
+ t.true(
1043
+ chunkResponse.data.length > 0,
1044
+ 'Should have created at least one chunk',
1045
+ );
849
1046
 
850
- // Verify all chunks are deleted from both storages
851
- for (const chunk of chunkResponse.data) {
852
- // Verify Azure/Local chunk is gone
853
- const azureResponse = await axios.get(convertToLocalUrl(chunk.uri), {
854
- validateStatus: status => true,
855
- timeout: 5000
856
- });
857
- t.is(azureResponse.status, 404, `Chunk should not be accessible in Azure/Local after deletion: ${chunk.uri}`);
1047
+ // Verify each chunk exists in both Azure/Local and GCS
1048
+ for (const chunk of chunkResponse.data) {
1049
+ // Verify Azure/Local URL is accessible
1050
+ const azureResponse = await axios.get(convertToLocalUrl(chunk.uri), {
1051
+ validateStatus: (status) => true,
1052
+ timeout: 5000,
1053
+ });
1054
+ t.is(
1055
+ azureResponse.status,
1056
+ 200,
1057
+ `Chunk should be accessible in Azure/Local: ${chunk.uri}`,
1058
+ );
1059
+
1060
+ // Verify GCS URL exists and is in correct format
1061
+ t.truthy(chunk.gcs, 'Chunk should contain GCS URL');
1062
+ t.true(
1063
+ chunk.gcs.startsWith('gs://'),
1064
+ 'GCS URL should use gs:// protocol',
1065
+ );
1066
+
1067
+ // Check if chunk exists in fake GCS
1068
+ const exists = await checkGCSFile(chunk.gcs);
1069
+ t.true(exists, `Chunk should exist in GCS: ${chunk.gcs}`);
1070
+ }
858
1071
 
859
- // Verify GCS chunk is gone
860
- const exists = await checkGCSFile(chunk.gcs);
861
- t.false(exists, `Chunk should not exist in GCS after deletion: ${chunk.gcs}`);
862
- }
863
- });
1072
+ // Clean up chunks
1073
+ const deleteResponse = await axios.delete(
1074
+ `${baseUrl}?operation=delete&requestId=${requestId}`,
1075
+ );
1076
+ t.is(deleteResponse.status, 200, 'Delete should succeed');
1077
+
1078
+ // Verify all chunks are deleted from both storages
1079
+ for (const chunk of chunkResponse.data) {
1080
+ // Verify Azure/Local chunk is gone
1081
+ const azureResponse = await axios.get(convertToLocalUrl(chunk.uri), {
1082
+ validateStatus: (status) => true,
1083
+ timeout: 5000,
1084
+ });
1085
+ t.is(
1086
+ azureResponse.status,
1087
+ 404,
1088
+ `Chunk should not be accessible in Azure/Local after deletion: ${chunk.uri}`,
1089
+ );
1090
+
1091
+ // Verify GCS chunk is gone
1092
+ const exists = await checkGCSFile(chunk.gcs);
1093
+ t.false(
1094
+ exists,
1095
+ `Chunk should not exist in GCS after deletion: ${chunk.gcs}`,
1096
+ );
1097
+ }
1098
+ },
1099
+ );
864
1100
 
865
- test.serial('should handle chunking errors gracefully with GCS', async t => {
1101
+ test.serial('should handle chunking errors gracefully with GCS', async (t) => {
866
1102
  if (!isGCSConfigured()) {
867
1103
  t.pass('Skipping test - GCS not configured');
868
1104
  return;
@@ -870,201 +1106,266 @@ test.serial('should handle chunking errors gracefully with GCS', async t => {
870
1106
 
871
1107
  // Create a test file to get a valid URL format
872
1108
  const validFileUrl = await createAndUploadTestFile();
873
-
1109
+
874
1110
  // Test with invalid URL that matches the format of our real URLs
875
1111
  const invalidUrl = validFileUrl.replace(/[^/]+$/, 'nonexistent-file.mp3');
876
1112
  const invalidResponse = await axios.get(baseUrl, {
877
1113
  params: {
878
1114
  uri: invalidUrl,
879
- requestId: uuidv4()
1115
+ requestId: uuidv4(),
880
1116
  },
881
- validateStatus: status => true,
882
- timeout: 5000
1117
+ validateStatus: (status) => true,
1118
+ timeout: 5000,
883
1119
  });
884
-
1120
+
885
1121
  t.is(invalidResponse.status, 500, 'Should reject nonexistent file URL');
886
- t.true(invalidResponse.data.includes('Error processing media file'), 'Should indicate error processing media file');
1122
+ t.true(
1123
+ invalidResponse.data.includes('Error processing media file'),
1124
+ 'Should indicate error processing media file',
1125
+ );
887
1126
 
888
1127
  // Test with missing URI
889
1128
  const noUriResponse = await axios.get(baseUrl, {
890
1129
  params: {
891
- requestId: uuidv4()
1130
+ requestId: uuidv4(),
892
1131
  },
893
- validateStatus: status => true,
894
- timeout: 5000
1132
+ validateStatus: (status) => true,
1133
+ timeout: 5000,
895
1134
  });
896
-
1135
+
897
1136
  t.is(noUriResponse.status, 400, 'Should reject request with no URI');
898
1137
  t.is(
899
1138
  noUriResponse.data,
900
1139
  'Please pass a uri and requestId on the query string or in the request body',
901
- 'Should return proper error message'
1140
+ 'Should return proper error message',
902
1141
  );
903
1142
  });
904
1143
 
905
1144
  // Legacy MediaFileChunker Tests
906
- test.serial('should handle file upload through legacy MediaFileChunker endpoint', async t => {
907
- const form = new FormData();
908
- form.append('file', Buffer.from('test content'), 'test.txt');
909
-
910
- const response = await axios.post(
911
- `http://localhost:${port}/api/MediaFileChunker`,
912
- form,
913
- {
914
- headers: {
915
- ...form.getHeaders(),
916
- 'Content-Type': 'multipart/form-data'
1145
+ test.serial(
1146
+ 'should handle file upload through legacy MediaFileChunker endpoint',
1147
+ async (t) => {
1148
+ const form = new FormData();
1149
+ form.append('file', Buffer.from('test content'), 'test.txt');
1150
+
1151
+ const response = await axios.post(
1152
+ `http://localhost:${port}/api/MediaFileChunker`,
1153
+ form,
1154
+ {
1155
+ headers: {
1156
+ ...form.getHeaders(),
1157
+ 'Content-Type': 'multipart/form-data',
1158
+ },
1159
+ validateStatus: (status) => true,
1160
+ timeout: 5000,
917
1161
  },
918
- validateStatus: status => true,
919
- timeout: 5000
920
- }
921
- );
922
-
923
- t.is(response.status, 200, 'Upload through legacy endpoint should succeed');
924
- t.truthy(response.data.url, 'Response should contain file URL');
1162
+ );
925
1163
 
926
- await cleanupUploadedFile(t, response.data.url);
927
- });
1164
+ t.is(response.status, 200, 'Upload through legacy endpoint should succeed');
1165
+ t.truthy(response.data.url, 'Response should contain file URL');
928
1166
 
929
- test.serial('should handle hash operations through legacy MediaFileChunker endpoint', async t => {
930
- const testHash = 'test-hash-legacy';
931
- const form = new FormData();
932
- form.append('file', Buffer.from('test content'), 'test.txt');
933
- form.append('hash', testHash);
934
-
935
- // Upload file with hash through legacy endpoint
936
- const uploadResponse = await axios.post(
937
- `http://localhost:${port}/api/MediaFileChunker`,
938
- form,
939
- {
940
- headers: {
941
- ...form.getHeaders(),
942
- 'Content-Type': 'multipart/form-data'
943
- },
944
- validateStatus: status => true,
945
- timeout: 5000
1167
+ await cleanupUploadedFile(t, response.data.url);
1168
+ },
1169
+ );
1170
+
1171
+ test.serial(
1172
+ 'should handle hash operations through legacy MediaFileChunker endpoint',
1173
+ async (t) => {
1174
+ const testHash = 'test-hash-legacy';
1175
+ const form = new FormData();
1176
+ form.append('file', Buffer.from('test content'), 'test.txt');
1177
+ form.append('hash', testHash);
1178
+
1179
+ let uploadedUrl;
1180
+ try {
1181
+ // Upload file with hash through legacy endpoint
1182
+ const uploadResponse = await axios.post(
1183
+ `http://localhost:${port}/api/MediaFileChunker`,
1184
+ form,
1185
+ {
1186
+ headers: {
1187
+ ...form.getHeaders(),
1188
+ 'Content-Type': 'multipart/form-data',
1189
+ },
1190
+ validateStatus: (status) => true,
1191
+ timeout: 5000,
1192
+ },
1193
+ );
1194
+
1195
+ t.is(
1196
+ uploadResponse.status,
1197
+ 200,
1198
+ 'Upload should succeed through legacy endpoint',
1199
+ );
1200
+ t.truthy(uploadResponse.data.url, 'Response should contain file URL');
1201
+ uploadedUrl = uploadResponse.data.url;
1202
+
1203
+ // Wait a bit for Redis to be updated
1204
+ await new Promise((resolve) => setTimeout(resolve, 1000));
1205
+
1206
+ // Check hash through legacy endpoint
1207
+ const hashCheckResponse = await axios.get(
1208
+ `http://localhost:${port}/api/MediaFileChunker`,
1209
+ {
1210
+ params: {
1211
+ hash: testHash,
1212
+ checkHash: true,
1213
+ },
1214
+ validateStatus: (status) => true,
1215
+ timeout: 5000,
1216
+ },
1217
+ );
1218
+
1219
+ t.is(hashCheckResponse.status, 200, 'Hash check should return 200 for uploaded hash');
1220
+ t.truthy(hashCheckResponse.data.url, 'Hash check should return file URL');
1221
+ } finally {
1222
+ await cleanupHashAndFile(testHash, uploadedUrl, `http://localhost:${port}/api/MediaFileChunker`);
946
1223
  }
947
- );
948
-
949
- t.is(uploadResponse.status, 200, 'Upload should succeed through legacy endpoint');
950
- t.truthy(uploadResponse.data.url, 'Response should contain file URL');
951
-
952
- // Wait a bit for Redis to be updated
953
- await new Promise(resolve => setTimeout(resolve, 1000));
954
-
955
- // Check hash through legacy endpoint
956
- const hashCheckResponse = await axios.get(`http://localhost:${port}/api/MediaFileChunker`, {
957
- params: {
958
- hash: testHash,
959
- checkHash: true
960
- },
961
- validateStatus: status => true,
962
- timeout: 5000
963
- });
964
-
965
- t.is(hashCheckResponse.status, 404, 'Hash check should return 404 for new hash');
966
- t.is(hashCheckResponse.data, `Hash ${testHash} not found`, 'Should indicate hash not found');
1224
+ },
1225
+ );
1226
+
1227
+ test.serial(
1228
+ 'should handle delete operation through legacy MediaFileChunker endpoint',
1229
+ async (t) => {
1230
+ const testRequestId = 'test-delete-request-legacy';
1231
+ const testContent = 'test content';
1232
+ const form = new FormData();
1233
+ form.append('file', Buffer.from(testContent), 'test.txt');
967
1234
 
968
- await cleanupUploadedFile(t, uploadResponse.data.url);
969
- });
1235
+ // Upload a file first through legacy endpoint
1236
+ const uploadResponse = await axios.post(
1237
+ `http://localhost:${port}/api/MediaFileChunker`,
1238
+ form,
1239
+ {
1240
+ headers: form.getHeaders(),
1241
+ validateStatus: (status) => true,
1242
+ timeout: 5000,
1243
+ },
1244
+ );
1245
+ t.is(
1246
+ uploadResponse.status,
1247
+ 200,
1248
+ 'Upload should succeed through legacy endpoint',
1249
+ );
970
1250
 
971
- test.serial('should handle delete operation through legacy MediaFileChunker endpoint', async t => {
972
- const testRequestId = 'test-delete-request-legacy';
973
- const testContent = 'test content';
974
- const form = new FormData();
975
- form.append('file', Buffer.from(testContent), 'test.txt');
976
-
977
- // Upload a file first through legacy endpoint
978
- const uploadResponse = await axios.post(
979
- `http://localhost:${port}/api/MediaFileChunker`,
980
- form,
981
- {
982
- headers: form.getHeaders(),
983
- validateStatus: status => true,
984
- timeout: 5000
985
- }
986
- );
987
- t.is(uploadResponse.status, 200, 'Upload should succeed through legacy endpoint');
988
-
989
- // Extract the folder name from the URL
990
- const url = uploadResponse.data.url;
991
- const folderName = getFolderNameFromUrl(url);
992
-
993
- // Delete the file through legacy endpoint
994
- const deleteResponse = await axios.delete(`http://localhost:${port}/api/MediaFileChunker?operation=delete&requestId=${folderName}`);
995
- t.is(deleteResponse.status, 200, 'Delete should succeed through legacy endpoint');
996
- t.true(Array.isArray(deleteResponse.data.body), 'Response should be an array of deleted files');
997
- t.true(deleteResponse.data.body.length > 0, 'Should have deleted at least one file');
998
- t.true(deleteResponse.data.body[0].includes(folderName), 'Deleted file should contain folder name');
999
- });
1251
+ // Extract the folder name from the URL
1252
+ const url = uploadResponse.data.url;
1253
+ const folderName = getFolderNameFromUrl(url);
1254
+
1255
+ // Delete the file through legacy endpoint
1256
+ const deleteResponse = await axios.delete(
1257
+ `http://localhost:${port}/api/MediaFileChunker?operation=delete&requestId=${folderName}`,
1258
+ );
1259
+ t.is(
1260
+ deleteResponse.status,
1261
+ 200,
1262
+ 'Delete should succeed through legacy endpoint',
1263
+ );
1264
+ t.true(
1265
+ Array.isArray(deleteResponse.data.body),
1266
+ 'Response should be an array of deleted files',
1267
+ );
1268
+ t.true(
1269
+ deleteResponse.data.body.length > 0,
1270
+ 'Should have deleted at least one file',
1271
+ );
1272
+ t.true(
1273
+ deleteResponse.data.body[0].includes(folderName),
1274
+ 'Deleted file should contain folder name',
1275
+ );
1276
+ },
1277
+ );
1000
1278
 
1001
- test.serial('should handle parameter validation through legacy MediaFileChunker endpoint', async t => {
1279
+ test.serial(
1280
+ 'should handle parameter validation through legacy MediaFileChunker endpoint',
1281
+ async (t) => {
1002
1282
  // Test missing parameters
1003
- const response = await axios.get(`http://localhost:${port}/api/MediaFileChunker`, {
1004
- validateStatus: status => true,
1005
- timeout: 5000
1006
- });
1007
-
1008
- t.is(response.status, 400, 'Should return 400 for missing parameters');
1009
- t.is(
1010
- response.data,
1011
- 'Please pass a uri and requestId on the query string or in the request body',
1012
- 'Should return proper error message'
1013
- );
1014
- });
1283
+ const response = await axios.get(
1284
+ `http://localhost:${port}/api/MediaFileChunker`,
1285
+ {
1286
+ validateStatus: (status) => true,
1287
+ timeout: 5000,
1288
+ },
1289
+ );
1015
1290
 
1016
- test.serial('should handle empty POST request through legacy MediaFileChunker endpoint', async t => {
1017
- const form = new FormData();
1018
- try {
1019
- await axios.post(
1291
+ t.is(response.status, 400, 'Should return 400 for missing parameters');
1292
+ t.is(
1293
+ response.data,
1294
+ 'Please pass a uri and requestId on the query string or in the request body',
1295
+ 'Should return proper error message',
1296
+ );
1297
+ },
1298
+ );
1299
+
1300
+ test.serial(
1301
+ 'should handle empty POST request through legacy MediaFileChunker endpoint',
1302
+ async (t) => {
1303
+ const form = new FormData();
1304
+ try {
1305
+ await axios.post(`http://localhost:${port}/api/MediaFileChunker`, form, {
1306
+ headers: form.getHeaders(),
1307
+ timeout: 5000,
1308
+ });
1309
+ t.fail('Should have thrown error');
1310
+ } catch (error) {
1311
+ t.is(
1312
+ error.response.status,
1313
+ 400,
1314
+ 'Should return 400 for empty POST request',
1315
+ );
1316
+ t.is(
1317
+ error.response.data,
1318
+ 'No file provided in request',
1319
+ 'Should return proper error message',
1320
+ );
1321
+ }
1322
+ },
1323
+ );
1324
+
1325
+ test.serial(
1326
+ 'should handle complete upload-request-delete-verify sequence through legacy MediaFileChunker endpoint',
1327
+ async (t) => {
1328
+ const testContent = 'test content for legacy sequence';
1329
+ const testHash = 'test-legacy-sequence-hash';
1330
+ const form = new FormData();
1331
+ form.append('file', Buffer.from(testContent), 'sequence-test.txt');
1332
+ form.append('hash', testHash);
1333
+
1334
+ // Upload file with hash through legacy endpoint
1335
+ const uploadResponse = await axios.post(
1020
1336
  `http://localhost:${port}/api/MediaFileChunker`,
1021
1337
  form,
1022
1338
  {
1023
1339
  headers: form.getHeaders(),
1024
- timeout: 5000
1025
- }
1340
+ validateStatus: (status) => true,
1341
+ timeout: 5000,
1342
+ },
1026
1343
  );
1027
- t.fail('Should have thrown error');
1028
- } catch (error) {
1029
- t.is(error.response.status, 400, 'Should return 400 for empty POST request');
1030
- t.is(error.response.data, 'No file provided in request', 'Should return proper error message');
1031
- }
1032
- });
1344
+ t.is(
1345
+ uploadResponse.status,
1346
+ 200,
1347
+ 'Upload should succeed through legacy endpoint',
1348
+ );
1349
+ t.truthy(uploadResponse.data.url, 'Response should contain URL');
1033
1350
 
1034
- test.serial('should handle complete upload-request-delete-verify sequence through legacy MediaFileChunker endpoint', async t => {
1035
- const testContent = 'test content for legacy sequence';
1036
- const testHash = 'test-legacy-sequence-hash';
1037
- const form = new FormData();
1038
- form.append('file', Buffer.from(testContent), 'sequence-test.txt');
1039
- form.append('hash', testHash);
1040
-
1041
- // Upload file with hash through legacy endpoint
1042
- const uploadResponse = await axios.post(
1043
- `http://localhost:${port}/api/MediaFileChunker`,
1044
- form,
1045
- {
1046
- headers: form.getHeaders(),
1047
- validateStatus: status => true,
1048
- timeout: 5000
1049
- }
1050
- );
1051
- t.is(uploadResponse.status, 200, 'Upload should succeed through legacy endpoint');
1052
- t.truthy(uploadResponse.data.url, 'Response should contain URL');
1053
-
1054
- await cleanupUploadedFile(t, uploadResponse.data.url);
1055
-
1056
- // Verify hash is gone by trying to get the file URL through legacy endpoint
1057
- const hashCheckResponse = await axios.get(`http://localhost:${port}/api/MediaFileChunker`, {
1058
- params: {
1059
- hash: testHash,
1060
- checkHash: true
1061
- },
1062
- validateStatus: status => true
1063
- });
1064
- t.is(hashCheckResponse.status, 404, 'Hash should not exist after deletion');
1065
- });
1351
+ await cleanupUploadedFile(t, uploadResponse.data.url);
1352
+
1353
+ // Verify hash is gone by trying to get the file URL through legacy endpoint
1354
+ const hashCheckResponse = await axios.get(
1355
+ `http://localhost:${port}/api/MediaFileChunker`,
1356
+ {
1357
+ params: {
1358
+ hash: testHash,
1359
+ checkHash: true,
1360
+ },
1361
+ validateStatus: (status) => true,
1362
+ },
1363
+ );
1364
+ t.is(hashCheckResponse.status, 404, 'Hash should not exist after deletion');
1365
+ },
1366
+ );
1066
1367
 
1067
1368
  // Cleanup
1068
- test.after.always('cleanup', async t => {
1369
+ test.after.always('cleanup', async (t) => {
1069
1370
  // Add any necessary cleanup here
1070
- });
1371
+ });