@aj-archipelago/cortex 1.3.51 → 1.3.52

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41)
  1. package/helper-apps/cortex-file-handler/{.env.test.azure → .env.test.azure.sample} +2 -1
  2. package/helper-apps/cortex-file-handler/{.env.test.gcs → .env.test.gcs.sample} +2 -1
  3. package/helper-apps/cortex-file-handler/{.env.test → .env.test.sample} +2 -1
  4. package/helper-apps/cortex-file-handler/Dockerfile +1 -1
  5. package/helper-apps/cortex-file-handler/INTERFACE.md +178 -0
  6. package/helper-apps/cortex-file-handler/package.json +4 -3
  7. package/helper-apps/cortex-file-handler/scripts/test-azure.sh +3 -0
  8. package/helper-apps/cortex-file-handler/{blobHandler.js → src/blobHandler.js} +167 -99
  9. package/helper-apps/cortex-file-handler/{fileChunker.js → src/fileChunker.js} +11 -24
  10. package/helper-apps/cortex-file-handler/{index.js → src/index.js} +236 -256
  11. package/helper-apps/cortex-file-handler/{services → src/services}/ConversionService.js +39 -18
  12. package/helper-apps/cortex-file-handler/{services → src/services}/FileConversionService.js +7 -3
  13. package/helper-apps/cortex-file-handler/src/services/storage/AzureStorageProvider.js +177 -0
  14. package/helper-apps/cortex-file-handler/src/services/storage/GCSStorageProvider.js +258 -0
  15. package/helper-apps/cortex-file-handler/src/services/storage/LocalStorageProvider.js +182 -0
  16. package/helper-apps/cortex-file-handler/src/services/storage/StorageFactory.js +86 -0
  17. package/helper-apps/cortex-file-handler/src/services/storage/StorageProvider.js +53 -0
  18. package/helper-apps/cortex-file-handler/src/services/storage/StorageService.js +259 -0
  19. package/helper-apps/cortex-file-handler/{start.js → src/start.js} +1 -1
  20. package/helper-apps/cortex-file-handler/src/utils/filenameUtils.js +28 -0
  21. package/helper-apps/cortex-file-handler/tests/FileConversionService.test.js +1 -1
  22. package/helper-apps/cortex-file-handler/tests/blobHandler.test.js +4 -4
  23. package/helper-apps/cortex-file-handler/tests/conversionResilience.test.js +152 -0
  24. package/helper-apps/cortex-file-handler/tests/fileChunker.test.js +2 -28
  25. package/helper-apps/cortex-file-handler/tests/fileUpload.test.js +134 -23
  26. package/helper-apps/cortex-file-handler/tests/getOperations.test.js +307 -0
  27. package/helper-apps/cortex-file-handler/tests/postOperations.test.js +291 -0
  28. package/helper-apps/cortex-file-handler/tests/start.test.js +50 -14
  29. package/helper-apps/cortex-file-handler/tests/storage/AzureStorageProvider.test.js +120 -0
  30. package/helper-apps/cortex-file-handler/tests/storage/GCSStorageProvider.test.js +193 -0
  31. package/helper-apps/cortex-file-handler/tests/storage/LocalStorageProvider.test.js +148 -0
  32. package/helper-apps/cortex-file-handler/tests/storage/StorageFactory.test.js +100 -0
  33. package/helper-apps/cortex-file-handler/tests/storage/StorageService.test.js +113 -0
  34. package/helper-apps/cortex-file-handler/tests/testUtils.helper.js +73 -19
  35. package/lib/entityConstants.js +1 -1
  36. package/package.json +1 -1
  37. package/helper-apps/cortex-file-handler/{constants.js → src/constants.js} +0 -0
  38. package/helper-apps/cortex-file-handler/{docHelper.js → src/docHelper.js} +0 -0
  39. package/helper-apps/cortex-file-handler/{helper.js → src/helper.js} +0 -0
  40. package/helper-apps/cortex-file-handler/{localFileHandler.js → src/localFileHandler.js} +0 -0
  41. package/helper-apps/cortex-file-handler/{redis.js → src/redis.js} +0 -0
@@ -0,0 +1,307 @@
1
+ import test from 'ava';
2
+ import fs from 'fs';
3
+ import path from 'path';
4
+ import { fileURLToPath } from 'url';
5
+ import { v4 as uuidv4 } from 'uuid';
6
+ import axios from 'axios';
7
+ import FormData from 'form-data';
8
+ import XLSX from 'xlsx';
9
+ import { port } from '../src/start.js';
10
+ import { cleanupHashAndFile, createTestMediaFile } from './testUtils.helper.js';
11
+
12
// Resolve this test module's own location (ESM provides no __dirname).
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
// Every request in this suite targets the locally running file handler.
const baseUrl = `http://localhost:${port}/api/CortexFileHandler`;
15
+
16
// Write `content` into a uniquely named file under tests/test-files and
// return its absolute path. A short uuid slice keeps the name well inside
// filesystem length limits.
async function createTestFile(content, extension) {
  const testDir = path.join(__dirname, 'test-files');
  // mkdirSync with recursive:true is a no-op when the directory exists.
  fs.mkdirSync(testDir, { recursive: true });
  const shortId = uuidv4().slice(0, 8);
  const filename = path.join(testDir, `test-${shortId}.${extension}`);
  fs.writeFileSync(filename, content);
  return filename;
}
27
+
28
// POST the file at `filePath` to the handler as multipart/form-data.
// Optional `requestId` and `hash` are forwarded as form fields.
// Returns the raw axios response; validateStatus accepts every status so
// callers can assert on error codes directly.
async function uploadFile(filePath, requestId = null, hash = null) {
  const form = new FormData();
  form.append('file', fs.createReadStream(filePath));
  if (requestId) form.append('requestId', requestId);
  if (hash) form.append('hash', hash);

  const response = await axios.post(baseUrl, form, {
    // form.getHeaders() supplies the multipart Content-Type *including* the
    // boundary parameter. The previous code overrode it with a bare
    // 'multipart/form-data' header, which drops the boundary and can break
    // server-side body parsing.
    headers: form.getHeaders(),
    validateStatus: (status) => true,
    timeout: 30000,
    maxContentLength: Infinity,
    maxBodyLength: Infinity,
  });

  return response;
}
48
+
49
// Suite setup: guarantee the shared fixture directory exists and expose it
// to individual tests through t.context.
test.before(async (t) => {
  const testDir = path.join(__dirname, 'test-files');
  await fs.promises.mkdir(testDir, { recursive: true });
  t.context = { testDir };
});
55
+
56
// Test: Document processing with save=true
test.serial('should process document with save=true', async (t) => {
  // Build a minimal XLSX workbook entirely in memory.
  const wb = XLSX.utils.book_new();
  const sheet = XLSX.utils.aoa_to_sheet([
    ['Name', 'Score'],
    ['Alice', 10],
    ['Bob', 8],
  ]);
  XLSX.utils.book_append_sheet(wb, sheet, 'Sheet1');

  // Persist the workbook to a temp file inside the shared test directory.
  const filePath = path.join(t.context.testDir, `${uuidv4()}.xlsx`);
  XLSX.writeFile(wb, filePath);

  const requestId = uuidv4();
  let uploadRes;
  let convertedUrl;

  try {
    // Upload first, then ask the handler to convert with save=true.
    uploadRes = await uploadFile(filePath, requestId);
    t.is(uploadRes.status, 200, 'Upload should succeed');

    const processResponse = await axios.get(baseUrl, {
      params: { uri: uploadRes.data.url, requestId, save: true },
      validateStatus: (status) => true
    });

    t.is(processResponse.status, 200, 'Document processing should succeed');
    t.truthy(processResponse.data.url, 'Should return converted file URL');
    t.true(processResponse.data.url.includes('.csv'), 'Should return a CSV URL');

    // Remember the converted URL so the finally block can delete it.
    convertedUrl = processResponse.data.url;

    // The converted file must be reachable immediately after conversion.
    const fileResponse = await axios.get(convertedUrl, {
      validateStatus: (status) => true
    });

    t.is(fileResponse.status, 200, 'Converted file should be accessible');
    t.true(fileResponse.data.includes('Name,Score'), 'CSV should contain headers');
    t.true(fileResponse.data.includes('Alice,10'), 'CSV should contain data');
  } finally {
    // Remove both remote copies, then the local workbook.
    if (uploadRes?.data?.url) {
      await cleanupHashAndFile(null, uploadRes.data.url, baseUrl);
    }
    if (convertedUrl) {
      await cleanupHashAndFile(null, convertedUrl, baseUrl);
    }
    fs.unlinkSync(filePath);
  }
});
117
+
118
// Test: Document processing with save=false
test.serial('should process document with save=false', async (t) => {
  const fileContent = 'Test document content';
  const filePath = await createTestFile(fileContent, 'txt');
  const requestId = uuidv4();
  let uploadRes;

  try {
    uploadRes = await uploadFile(filePath, requestId);
    t.is(uploadRes.status, 200, 'Upload should succeed');

    // With save=false the handler returns the chunked text inline
    // instead of persisting a converted file.
    const processResponse = await axios.get(baseUrl, {
      params: { uri: uploadRes.data.url, requestId, save: false },
      validateStatus: (status) => true
    });

    t.is(processResponse.status, 200, 'Document processing should succeed');
    t.true(Array.isArray(processResponse.data), 'Should return array of chunks');
    t.true(processResponse.data.length > 0, 'Should return non-empty chunks');
    // ensure the first chunk contains the right content
    t.true(processResponse.data[0].includes(fileContent), 'First chunk should contain the right content');
  } finally {
    fs.unlinkSync(filePath);
    if (uploadRes?.data?.url) {
      await cleanupHashAndFile(null, uploadRes.data.url, baseUrl);
    }
  }
});
152
+
153
// Test: Media file chunking
test.serial('should chunk media file', async (t) => {
  // Generate a real ~10 second MP3 so the handler has something to split.
  const testDir = path.join(__dirname, 'test-files');
  fs.mkdirSync(testDir, { recursive: true });
  const filePath = path.join(testDir, `test-${uuidv4()}.mp3`);

  try {
    await createTestMediaFile(filePath, 10);

    const requestId = uuidv4();
    let uploadRes;

    try {
      uploadRes = await uploadFile(filePath, requestId);
      t.is(uploadRes.status, 200, 'Upload should succeed');

      // A GET with only uri/requestId triggers media chunking.
      const chunkResponse = await axios.get(baseUrl, {
        params: { uri: uploadRes.data.url, requestId },
        validateStatus: (status) => true
      });

      t.is(chunkResponse.status, 200, 'Chunking should succeed');
      t.true(Array.isArray(chunkResponse.data), 'Should return array of chunks');
      t.true(chunkResponse.data.length > 0, 'Should return non-empty chunks');

      // Every chunk must carry a URI and a numeric offset.
      for (const chunk of chunkResponse.data) {
        t.truthy(chunk.uri, 'Chunk should have URI');
        t.true(typeof chunk.offset === 'number', 'Chunk should have a numeric offset');
      }
    } finally {
      if (uploadRes?.data?.url) {
        await cleanupHashAndFile(null, uploadRes.data.url, baseUrl);
      }
    }
  } finally {
    // Clean up the generated media file.
    if (fs.existsSync(filePath)) {
      fs.unlinkSync(filePath);
    }
  }
});
203
+
204
// Test: Remote file fetching with fetch parameter
test.serial('should fetch remote file', async (t) => {
  const requestId = uuidv4();
  const remoteUrl = 'https://example.com/test.txt';

  // example.com serves no real file, so the handler should reject it.
  const response = await axios.get(baseUrl, {
    params: { fetch: remoteUrl, requestId },
    validateStatus: (status) => true
  });

  t.is(response.status, 400, 'Should reject invalid URL');
  t.is(response.data, 'Invalid or inaccessible URL', 'Should return correct error message');
});
220
+
221
// Test: Redis caching behavior for remote files
test.serial('should cache remote files in Redis', async (t) => {
  const requestId = uuidv4();
  const hash = 'test-cache-' + uuidv4();

  // First request attempts to fetch and cache the (unreachable) remote file.
  // BUG FIX: `timeout` belongs in the axios request config, not in `params` —
  // previously it was serialized as a ?timeout=10000 query parameter and
  // never acted as a client-side timeout.
  await axios.get(baseUrl, {
    params: {
      fetch: 'https://example.com/test.txt',
      requestId,
      hash,
    },
    timeout: 10000,
    validateStatus: (status) => true
  });

  // Second request checks whether anything was cached under the hash.
  const secondResponse = await axios.get(baseUrl, {
    params: {
      hash,
      checkHash: true
    },
    validateStatus: (status) => true
  });

  // The fetch of the bogus URL failed, so nothing should have been cached.
  t.is(secondResponse.status, 404, 'Should return 404 for invalid URL');
});
248
+
249
// Test: Error cases for invalid URLs
test.serial('should handle invalid URLs', async (t) => {
  const requestId = uuidv4();

  // Each malformed URI must come back as a 400 with an error message.
  const expectRejected = async (url) => {
    const response = await axios.get(baseUrl, {
      params: { uri: url, requestId },
      validateStatus: (status) => true
    });
    t.is(response.status, 400, `Should reject invalid URL: ${url}`);
    t.true(response.data.includes('Invalid') || response.data.includes('Error'), 'Should return error message');
  };

  const invalidUrls = [
    'not-a-url',
    'http://',
    'https://',
    'ftp://invalid',
    'file:///nonexistent'
  ];

  for (const url of invalidUrls) {
    await expectRejected(url);
  }
});
273
+
274
// Test: Long filename handling
test.serial('should handle long filenames', async (t) => {
  const fileContent = 'Test content';
  const filePath = await createTestFile(fileContent, 'txt');
  const requestId = uuidv4();
  let uploadRes;

  try {
    uploadRes = await uploadFile(filePath, requestId);
    t.is(uploadRes.status, 200, 'Upload should succeed');

    // Swap the real filename for one far beyond typical pathname limits.
    const longFilename = `${'a'.repeat(1100)}.txt`;
    const longUrl = uploadRes.data.url.replace(/[^/]+$/, longFilename);

    const processResponse = await axios.get(baseUrl, {
      params: { uri: longUrl, requestId },
      validateStatus: (status) => true
    });

    t.is(processResponse.status, 400, 'Should reject URL with too long filename');
    t.is(processResponse.data, 'URL pathname is too long', 'Should return correct error message');
  } finally {
    fs.unlinkSync(filePath);
    if (uploadRes?.data?.url) {
      await cleanupHashAndFile(null, uploadRes.data.url, baseUrl);
    }
  }
});
@@ -0,0 +1,291 @@
1
+ import test from 'ava';
2
+ import fs from 'fs';
3
+ import path from 'path';
4
+ import { fileURLToPath } from 'url';
5
+ import { v4 as uuidv4 } from 'uuid';
6
+ import axios from 'axios';
7
+ import FormData from 'form-data';
8
+ import { port } from '../src/start.js';
9
+ import { cleanupHashAndFile, getFolderNameFromUrl } from './testUtils.helper.js';
10
+
11
// Resolve this test module's own location (ESM provides no __dirname).
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
// Every request in this suite targets the locally running file handler.
const baseUrl = `http://localhost:${port}/api/CortexFileHandler`;
14
+
15
// Helper function to determine if GCS is configured.
// Returns true when either GCS service-account env var is set.
// Previously this returned the raw env-var string (or undefined) despite the
// `is` prefix; callers only test truthiness, so coercing to a real boolean is
// backward compatible and matches boolean naming conventions.
function isGCSConfigured() {
  return Boolean(
    process.env.GCP_SERVICE_ACCOUNT_KEY_BASE64 ||
    process.env.GCP_SERVICE_ACCOUNT_KEY
  );
}
22
+
23
// Write `content` to a uuid-named file with the given extension under
// tests/test-files and return its absolute path.
async function createTestFile(content, extension) {
  const testDir = path.join(__dirname, 'test-files');
  // mkdirSync with recursive:true is a no-op when the directory exists.
  fs.mkdirSync(testDir, { recursive: true });
  const filename = path.join(testDir, `${uuidv4()}.${extension}`);
  fs.writeFileSync(filename, content);
  return filename;
}
33
+
34
// POST the file at `filePath` to the handler as multipart/form-data.
// Optional `requestId` and `hash` are forwarded as form fields.
// Returns the raw axios response; validateStatus accepts every status so
// callers can assert on error codes directly.
async function uploadFile(filePath, requestId = null, hash = null) {
  const form = new FormData();
  form.append('file', fs.createReadStream(filePath));
  if (requestId) form.append('requestId', requestId);
  if (hash) form.append('hash', hash);

  const response = await axios.post(baseUrl, form, {
    // form.getHeaders() supplies the multipart Content-Type *including* the
    // boundary parameter. The previous code overrode it with a bare
    // 'multipart/form-data' header, which drops the boundary and can break
    // server-side body parsing.
    headers: form.getHeaders(),
    validateStatus: (status) => true,
    timeout: 30000,
    maxContentLength: Infinity,
    maxBodyLength: Infinity,
  });

  return response;
}
54
+
55
// Suite setup: guarantee the shared fixture directory exists and expose it
// to individual tests through t.context.
test.before(async (t) => {
  const testDir = path.join(__dirname, 'test-files');
  await fs.promises.mkdir(testDir, { recursive: true });
  t.context = { testDir };
});
61
+
62
// Test: Upload with hash and verify Redis storage
test.serial('should store file metadata in Redis with hash', async (t) => {
  const fileContent = 'test content';
  const filePath = await createTestFile(fileContent, 'txt');
  const requestId = uuidv4();
  const hash = 'test-hash-' + uuidv4();
  let uploadRes;

  try {
    uploadRes = await uploadFile(filePath, requestId, hash);
    t.is(uploadRes.status, 200, 'Upload should succeed');
    t.truthy(uploadRes.data.url, 'Should have file URL');
    t.is(uploadRes.data.hash, hash, 'Should return correct hash');

    // Give the asynchronous Redis write a moment to land.
    await new Promise(resolve => setTimeout(resolve, 1000));

    // The hash lookup should resolve to the uploaded file's URL.
    const checkResponse = await axios.get(baseUrl, {
      params: { hash, checkHash: true },
      validateStatus: (status) => true,
    });

    t.is(checkResponse.status, 200, 'Hash should exist in Redis');
    t.truthy(checkResponse.data.url, 'Hash check should return URL');
    t.is(checkResponse.data.url, uploadRes.data.url, 'Hash check should return correct URL');
  } finally {
    fs.unlinkSync(filePath);
    if (uploadRes?.data?.url) {
      await cleanupHashAndFile(hash, uploadRes.data.url, baseUrl);
    }
  }
});
98
+
99
// Test: Upload with GCS backup verification
test.serial('should create GCS backup when configured', async (t) => {
  // Without GCS credentials this test is a deliberate no-op.
  if (!isGCSConfigured()) {
    t.pass('Skipping test - GCS not configured');
    return;
  }

  const fileContent = 'test content';
  const filePath = await createTestFile(fileContent, 'txt');
  const requestId = uuidv4();
  let uploadRes;

  try {
    uploadRes = await uploadFile(filePath, requestId);
    t.is(uploadRes.status, 200, 'Upload should succeed');
    t.truthy(uploadRes.data.url, 'Should have primary storage URL');
    t.truthy(uploadRes.data.gcs, 'Should have GCS backup URL');
    t.true(uploadRes.data.gcs.startsWith('gs://'), 'GCS URL should use gs:// protocol');

    // The primary copy must be readable and match the uploaded bytes.
    const primaryResponse = await axios.get(uploadRes.data.url);
    t.is(primaryResponse.status, 200, 'Primary file should be accessible');
    t.is(primaryResponse.data, fileContent, 'Primary file content should match');

    // gs:// URLs are not directly fetchable over HTTP, so the backup is
    // verified through the primary URL as well.
    const gcsResponse = await axios.get(uploadRes.data.url);
    t.is(gcsResponse.status, 200, 'GCS file should be accessible');
    t.is(gcsResponse.data, fileContent, 'GCS file content should match');
  } finally {
    fs.unlinkSync(filePath);
    if (uploadRes?.data?.url) {
      await cleanupHashAndFile(null, uploadRes.data.url, baseUrl);
    }
  }
});
135
+
136
// Test: Upload with large file
test.serial('should handle large file upload', async (t) => {
  const largeContent = 'x'.repeat(10 * 1024 * 1024); // 10MB
  const filePath = await createTestFile(largeContent, 'txt');
  const requestId = uuidv4();
  let uploadRes;

  try {
    uploadRes = await uploadFile(filePath, requestId);
    t.is(uploadRes.status, 200, 'Large file upload should succeed');
    t.truthy(uploadRes.data.url, 'Should have file URL');

    // Round-trip the file and compare lengths (content is all 'x').
    const fileResponse = await axios.get(uploadRes.data.url);
    t.is(fileResponse.status, 200, 'File should be accessible');
    t.is(fileResponse.data.length, largeContent.length, 'File size should match');
  } finally {
    fs.unlinkSync(filePath);
    if (uploadRes?.data?.url) {
      await cleanupHashAndFile(null, uploadRes.data.url, baseUrl);
    }
  }
});
159
+
160
// Test: Upload with special characters in filename
test('should handle special characters in filename', async (t) => {
  const fileContent = 'test content';
  const specialFilename = `test file with spaces and special chars !@#$%^&*()_+-=[]{}|;:,.<>?${uuidv4()}.txt`;
  // BUG FIX: the special name was previously passed to createTestFile as the
  // *extension* argument, so the file on disk was named
  // `<uuid>.<specialFilename>` instead of the filename this test claims to
  // exercise. Write the file directly so the exact special filename is used.
  const filePath = path.join(t.context.testDir, specialFilename);
  fs.writeFileSync(filePath, fileContent);
  const requestId = uuidv4();
  let response;

  try {
    response = await uploadFile(filePath, requestId);
    t.is(response.status, 200, 'Upload should succeed');
    t.truthy(response.data.url, 'Should have file URL');
    t.truthy(response.data.filename, 'Should have filename in response');

    // Verify file is accessible
    const fileResponse = await axios.get(response.data.url);
    t.is(fileResponse.status, 200, 'File should be accessible');
    t.is(fileResponse.data, fileContent, 'File content should match');
  } finally {
    fs.unlinkSync(filePath);
    if (response?.data?.url) {
      await cleanupHashAndFile(null, response.data.url, baseUrl);
    }
  }
});
185
+
186
// Test: Upload with concurrent requests
test.serial('should handle concurrent uploads', async (t) => {
  const requestId = uuidv4();
  const numUploads = 5;
  const uploads = [];
  let responses = []; // declared outside try so finally can see it

  // Kick off all uploads; each promise starts the moment it is created.
  for (let i = 0; i < numUploads; i++) {
    const filePath = await createTestFile(`test content ${i}`, 'txt');
    uploads.push({ filePath, promise: uploadFile(filePath, requestId) });
  }

  try {
    responses = await Promise.all(uploads.map(u => u.promise));

    // Every upload must have succeeded with a URL.
    responses.forEach((response, i) => {
      t.is(response.status, 200, `Upload ${i} should succeed`);
      t.truthy(response.data.url, `Upload ${i} should have URL`);
    });

    // Every uploaded file must be retrievable.
    for (const response of responses) {
      const fileResponse = await axios.get(response.data.url);
      t.is(fileResponse.status, 200, 'File should be accessible');
    }
  } finally {
    // Remove local fixtures first, then the remote copies.
    for (const upload of uploads) {
      fs.unlinkSync(upload.filePath);
    }
    for (const response of responses) {
      if (response?.data?.url) {
        await cleanupHashAndFile(null, response.data.url, baseUrl);
      }
    }
  }
});
231
+
232
// Test: Upload with missing file
test.serial('should handle missing file in request', async (t) => {
  const form = new FormData();
  form.append('requestId', uuidv4());

  const response = await axios.post(baseUrl, form, {
    // Let form-data supply the multipart Content-Type with its boundary;
    // the previous bare 'multipart/form-data' override stripped the
    // boundary parameter.
    headers: form.getHeaders(),
    validateStatus: (status) => true,
  });

  t.is(response.status, 400, 'Should reject request without file');
  t.is(response.data, 'No file provided in request', 'Should return correct error message');
});
248
+
249
// Test: Upload with empty file
test.serial('should handle empty file upload', async (t) => {
  const filePath = await createTestFile('', 'txt');

  try {
    // Zero-byte uploads are rejected outright.
    const response = await uploadFile(filePath, uuidv4());
    t.is(response.status, 400, 'Should reject empty file');
    t.is(response.data, 'Invalid file: file is empty', 'Should return correct error message');
  } finally {
    fs.unlinkSync(filePath);
  }
});
263
+
264
// Test: Upload without requestId should generate one
test.serial('should generate requestId when not provided', async (t) => {
  const fileContent = 'test content';
  const filePath = await createTestFile(fileContent, 'txt');
  let uploadRes;

  try {
    uploadRes = await uploadFile(filePath);
    t.is(uploadRes.status, 200, 'Upload should succeed without requestId');
    t.truthy(uploadRes.data.url, 'Should have file URL');

    // The server embeds the generated requestId as the second-to-last
    // path segment of the returned URL.
    const segments = uploadRes.data.url.split('/');
    const requestId = segments.at(-2);
    t.truthy(requestId, 'URL should contain a requestId');
    t.true(requestId.length > 0, 'requestId should not be empty');

    // Verify file is accessible
    const fileResponse = await axios.get(uploadRes.data.url);
    t.is(fileResponse.status, 200, 'File should be accessible');
    t.is(fileResponse.data, fileContent, 'File content should match');
  } finally {
    fs.unlinkSync(filePath);
    if (uploadRes?.data?.url) {
      await cleanupHashAndFile(null, uploadRes.data.url, baseUrl);
    }
  }
});