@aj-archipelago/cortex 1.3.10 → 1.3.12

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37)
  1. package/config.js +15 -0
  2. package/helper-apps/cortex-file-handler/.env.test +7 -0
  3. package/helper-apps/cortex-file-handler/.env.test.azure +6 -0
  4. package/helper-apps/cortex-file-handler/.env.test.gcs +9 -0
  5. package/helper-apps/cortex-file-handler/blobHandler.js +263 -179
  6. package/helper-apps/cortex-file-handler/constants.js +107 -0
  7. package/helper-apps/cortex-file-handler/docHelper.js +4 -1
  8. package/helper-apps/cortex-file-handler/fileChunker.js +171 -109
  9. package/helper-apps/cortex-file-handler/helper.js +39 -17
  10. package/helper-apps/cortex-file-handler/index.js +230 -138
  11. package/helper-apps/cortex-file-handler/localFileHandler.js +21 -3
  12. package/helper-apps/cortex-file-handler/package-lock.json +2622 -51
  13. package/helper-apps/cortex-file-handler/package.json +24 -4
  14. package/helper-apps/cortex-file-handler/redis.js +9 -18
  15. package/helper-apps/cortex-file-handler/scripts/setup-azure-container.js +22 -0
  16. package/helper-apps/cortex-file-handler/scripts/setup-test-containers.js +49 -0
  17. package/helper-apps/cortex-file-handler/scripts/test-azure.sh +34 -0
  18. package/helper-apps/cortex-file-handler/scripts/test-gcs.sh +49 -0
  19. package/helper-apps/cortex-file-handler/start.js +26 -4
  20. package/helper-apps/cortex-file-handler/tests/docHelper.test.js +148 -0
  21. package/helper-apps/cortex-file-handler/tests/fileChunker.test.js +322 -0
  22. package/helper-apps/cortex-file-handler/tests/start.test.js +928 -0
  23. package/helper-apps/cortex-realtime-voice-server/client/src/chat/components/ScreenshotCapture.tsx +57 -9
  24. package/helper-apps/cortex-realtime-voice-server/src/SocketServer.ts +35 -22
  25. package/helper-apps/cortex-realtime-voice-server/src/Tools.ts +65 -14
  26. package/helper-apps/cortex-realtime-voice-server/src/realtime/client.ts +10 -10
  27. package/helper-apps/cortex-realtime-voice-server/src/realtime/socket.ts +2 -1
  28. package/package.json +1 -1
  29. package/pathways/system/entity/sys_entity_continue.js +1 -1
  30. package/pathways/system/entity/sys_entity_start.js +1 -0
  31. package/pathways/system/entity/sys_generator_reasoning.js +1 -1
  32. package/pathways/system/entity/sys_generator_video_vision.js +2 -1
  33. package/pathways/system/entity/sys_router_tool.js +6 -4
  34. package/pathways/system/rest_streaming/sys_openai_chat_o1.js +19 -0
  35. package/pathways/system/rest_streaming/sys_openai_chat_o1_mini.js +19 -0
  36. package/server/plugins/openAiReasoningPlugin.js +11 -2
  37. package/server/plugins/openAiWhisperPlugin.js +9 -13
@@ -1,28 +1,23 @@
1
- import { downloadFile, processYoutubeUrl, splitMediaFile } from './fileChunker.js';
2
- import { saveFileToBlob, deleteBlob, uploadBlob, cleanup, cleanupGCS, gcsUrlExists } from './blobHandler.js';
1
+ import { downloadFile, splitMediaFile } from './fileChunker.js';
2
+ import { saveFileToBlob, deleteBlob, deleteGCS, uploadBlob, cleanup, cleanupGCS, gcsUrlExists, ensureGCSUpload, gcs, AZURE_STORAGE_CONTAINER_NAME } from './blobHandler.js';
3
3
  import { cleanupRedisFileStoreMap, getFileStoreMap, publishRequestProgress, removeFromFileStoreMap, setFileStoreMap } from './redis.js';
4
- import { deleteTempPath, ensureEncoded, isValidYoutubeUrl } from './helper.js';
4
+ import { ensureEncoded, ensureFileExtension } from './helper.js';
5
5
  import { moveFileToPublicFolder, deleteFolder, cleanupLocal } from './localFileHandler.js';
6
6
  import { documentToText, easyChunker } from './docHelper.js';
7
+ import { DOC_EXTENSIONS, isAcceptedMimeType } from './constants.js';
7
8
  import path from 'path';
8
9
  import os from 'os';
9
10
  import { v4 as uuidv4 } from 'uuid';
10
11
  import fs from 'fs';
11
12
  import http from 'http';
12
13
  import https from 'https';
13
- import axios from "axios";
14
- import { pipeline } from "stream";
15
- import { promisify } from "util";
16
- const pipelineUtility = promisify(pipeline); // To pipe streams using async/await
17
-
18
- const DOC_EXTENSIONS = [".txt", ".json", ".csv", ".md", ".xml", ".js", ".html", ".css", '.pdf', '.docx', '.xlsx', '.csv'];
19
14
 
20
15
  const useAzure = process.env.AZURE_STORAGE_CONNECTION_STRING ? true : false;
21
- console.log(useAzure ? 'Using Azure Storage' : 'Using local file system');
22
16
 
17
+ console.log(`Storage configuration - ${useAzure ? 'Azure' : 'Local'} Storage${gcs ? ' and Google Cloud Storage' : ''}`);
23
18
 
24
19
  let isCleanupRunning = false;
25
- async function cleanupInactive() {
20
+ async function cleanupInactive(context) {
26
21
  try {
27
22
  if (isCleanupRunning) { return; } //no need to cleanup every call
28
23
  isCleanupRunning = true;
@@ -52,7 +47,7 @@ async function cleanupInactive() {
52
47
 
53
48
  try {
54
49
  if (cleanedAzure && cleanedAzure.length > 0) {
55
- await cleanup(cleanedAzure);
50
+ await cleanup(context, cleanedAzure);
56
51
  }
57
52
  } catch (error) {
58
53
  console.log('Error occurred during azure cleanup:', error);
@@ -83,94 +78,181 @@ async function cleanupInactive() {
83
78
 
84
79
  async function urlExists(url) {
85
80
  if(!url) return false;
86
- const httpModule = url.startsWith('https') ? https : http;
87
81
 
88
- return new Promise((resolve) => {
89
- httpModule
90
- .get(url, function (response) {
91
- // Check if the response status is OK
92
- resolve(response.statusCode === 200);
93
- })
94
- .on('error', function () {
95
- resolve(false);
82
+ try {
83
+ // Basic URL validation
84
+ const urlObj = new URL(url);
85
+ if (!['http:', 'https:'].includes(urlObj.protocol)) {
86
+ throw new Error('Invalid protocol - only HTTP and HTTPS are supported');
87
+ }
88
+
89
+ const httpModule = urlObj.protocol === 'https:' ? https : http;
90
+
91
+ return new Promise((resolve) => {
92
+ const request = httpModule.request(url, { method: 'HEAD' }, function(response) {
93
+ if (response.statusCode >= 200 && response.statusCode < 400) {
94
+ const contentType = response.headers['content-type'];
95
+ const cleanContentType = contentType ? contentType.split(';')[0].trim() : '';
96
+ // Check if the content type is one we accept
97
+ if (cleanContentType && isAcceptedMimeType(cleanContentType)) {
98
+ resolve({ valid: true, contentType: cleanContentType });
99
+ } else {
100
+ console.log(`Unsupported content type: ${contentType}`);
101
+ resolve({ valid: false });
102
+ }
103
+ } else {
104
+ resolve({ valid: false });
105
+ }
96
106
  });
97
- });
107
+
108
+ request.on('error', function(err) {
109
+ console.error('URL validation error:', err.message);
110
+ resolve({ valid: false });
111
+ });
112
+
113
+ request.end();
114
+ });
115
+ } catch (error) {
116
+ console.error('URL validation error:', error.message);
117
+ return { valid: false };
118
+ }
98
119
  }
99
120
 
100
-
101
- async function main(context, req) {
102
- context.log('Starting req processing..');
121
+ async function CortexFileHandler(context, req) {
122
+ const { uri, requestId, save, hash, checkHash, clearHash, fetch, load, restore } = req.body?.params || req.query;
123
+ const operation = save ? 'save' :
124
+ checkHash ? 'checkHash' :
125
+ clearHash ? 'clearHash' :
126
+ fetch || load || restore ? 'remoteFile' :
127
+ req.method.toLowerCase() === 'delete' || req.query.operation === 'delete' ? 'delete' :
128
+ uri ? (DOC_EXTENSIONS.some(ext => uri.toLowerCase().endsWith(ext)) ? 'document_processing' : 'media_chunking') :
129
+ 'upload';
130
+
131
+ context.log(`Processing ${req.method} request - ${requestId ? `requestId: ${requestId}, ` : ''}${uri ? `uri: ${uri}, ` : ''}${hash ? `hash: ${hash}, ` : ''}operation: ${operation}`);
103
132
 
104
- cleanupInactive(); //trigger & no need to wait for it
133
+ cleanupInactive(context); //trigger & no need to wait for it
105
134
 
106
135
  // Clean up blob when request delete which means processing marked completed
107
- if (req.method.toLowerCase() === `delete`) {
108
- const { requestId } = req.query;
109
- if (!requestId) {
136
+ if (operation === 'delete') {
137
+ const deleteRequestId = req.query.requestId || requestId;
138
+ if (!deleteRequestId) {
110
139
  context.res = {
111
140
  status: 400,
112
141
  body: "Please pass a requestId on the query string"
113
142
  };
114
143
  return;
115
144
  }
116
- const result = useAzure ? await deleteBlob(requestId) : await deleteFolder(requestId);
145
+
146
+ // Delete from Azure/Local storage
147
+ const azureResult = useAzure ? await deleteBlob(deleteRequestId) : await deleteFolder(deleteRequestId);
148
+ const gcsResult = [];
149
+ if (gcs) {
150
+ for (const blobName of azureResult) {
151
+ gcsResult.push(...await deleteGCS(blobName));
152
+ }
153
+ }
154
+
117
155
  context.res = {
118
- body: result
156
+ status: 200,
157
+ body: { body: [...azureResult, ...gcsResult] }
119
158
  };
120
159
  return;
121
160
  }
122
161
 
123
- const { uri, requestId, save, hash, checkHash, clearHash, fetch, load, restore } = req.body?.params || req.query;
124
-
125
- const filepond = fetch || restore || load;
126
- if (req.method.toLowerCase() === `get` && filepond) {
127
- context.log(`Remote file: ${filepond}`);
128
- // Check if file already exists (using hash as the key)
129
- const exists = await getFileStoreMap(filepond);
130
- if(exists){
131
- context.res = {
132
- status: 200,
133
- body: exists // existing file URL
134
- };
135
- return;
136
- }
162
+ const remoteUrl = fetch || restore || load;
163
+ if (req.method.toLowerCase() === `get` && remoteUrl) {
164
+ context.log(`Remote file: ${remoteUrl}`);
165
+ let filename; // Declare filename outside try block
166
+ try {
167
+ // Validate URL format and accessibility
168
+ const urlCheck = await urlExists(remoteUrl);
169
+ if (!urlCheck.valid) {
170
+ context.res = {
171
+ status: 400,
172
+ body: 'Invalid or inaccessible URL'
173
+ };
174
+ return;
175
+ }
137
176
 
138
- // Check if it's a youtube url
139
- let youtubeDownloadedFile = null;
140
- if(isValidYoutubeUrl(filepond)){
141
- youtubeDownloadedFile = await processYoutubeUrl(filepond, true);
142
- }
143
- const filename = path.join(os.tmpdir(), path.basename(youtubeDownloadedFile || filepond));
144
- // Download the remote file to a local/temporary location keep name & ext
145
- if(!youtubeDownloadedFile){
146
- const response = await axios.get(filepond, { responseType: "stream" });
147
- await pipelineUtility(response.data, fs.createWriteStream(filename));
148
- }
177
+ // Check if file already exists (using hash as the key)
178
+ let exists = await getFileStoreMap(remoteUrl);
179
+ if(exists){
180
+ context.res = {
181
+ status: 200,
182
+ body: exists
183
+ };
184
+ //update redis timestamp with current time
185
+ await setFileStoreMap(remoteUrl, exists);
186
+ return;
187
+ }
149
188
 
150
-
151
- const res = await uploadBlob(context, null, !useAzure, true, filename);
152
- context.log(`File uploaded: ${JSON.stringify(res)}`);
189
+ // Download the file first
190
+ const urlObj = new URL(remoteUrl);
191
+ let originalFileName = path.basename(urlObj.pathname);
192
+ if (!originalFileName || originalFileName === '') {
193
+ originalFileName = urlObj.hostname;
194
+ }
195
+
196
+ // Ensure the filename has the correct extension based on content type
197
+ originalFileName = ensureFileExtension(originalFileName, urlCheck.contentType);
198
+
199
+ const maxLength = 200; // Set the maximum length for the filename
200
+ let truncatedFileName = originalFileName;
201
+ if (originalFileName.length > maxLength) {
202
+ const extension = path.extname(originalFileName);
203
+ const basename = path.basename(originalFileName, extension);
204
+ truncatedFileName = basename.substring(0, maxLength - extension.length) + extension;
205
+ }
153
206
 
154
- //Update Redis (using hash as the key)
155
- await setFileStoreMap(filepond, res);
207
+ // Use the original-truncated file name when saving the downloaded file
208
+ filename = path.join(os.tmpdir(), truncatedFileName);
209
+ await downloadFile(remoteUrl, filename);
210
+
211
+ // Now upload the downloaded file
212
+ const res = await uploadBlob(context, null, !useAzure, filename, remoteUrl);
156
213
 
157
- // Return the file URL
158
- context.res = {
159
- status: 200,
160
- body: res,
161
- };
214
+ //Update Redis (using hash as the key)
215
+ await setFileStoreMap(remoteUrl, res);
162
216
 
217
+ // Return the file URL
218
+ context.res = {
219
+ status: 200,
220
+ body: res,
221
+ };
222
+ } catch (error) {
223
+ context.log("Error processing remote file request:", error);
224
+ context.res = {
225
+ status: 500,
226
+ body: `Error processing file: ${error.message}`
227
+ };
228
+ } finally {
229
+ // Cleanup temp file if it exists
230
+ try {
231
+ if (filename && fs.existsSync(filename)) {
232
+ fs.unlinkSync(filename);
233
+ }
234
+ } catch (err) {
235
+ context.log("Error cleaning up temp file:", err);
236
+ }
237
+ }
163
238
  return;
164
239
  }
165
240
 
166
241
  if(hash && clearHash){
167
242
  try {
168
243
  const hashValue = await getFileStoreMap(hash);
169
- await removeFromFileStoreMap(hash);
170
- context.res = {
171
- status: 200,
172
- body: hashValue ? `Hash ${hash} removed` : `Hash ${hash} not found`
173
- };
244
+ if (hashValue) {
245
+ await removeFromFileStoreMap(hash);
246
+ context.res = {
247
+ status: 200,
248
+ body: `Hash ${hash} removed`
249
+ };
250
+ } else {
251
+ context.res = {
252
+ status: 404,
253
+ body: `Hash ${hash} not found`
254
+ };
255
+ }
174
256
  } catch (error) {
175
257
  context.res = {
176
258
  status: 500,
@@ -178,37 +260,53 @@ async function main(context, req) {
178
260
  };
179
261
  console.log('Error occurred during hash cleanup:', error);
180
262
  }
181
- return
263
+ return;
182
264
  }
183
265
 
184
266
  if(hash && checkHash){ //check if hash exists
185
- context.log(`Checking hash: ${hash}`);
186
- const result = await getFileStoreMap(hash);
267
+ let hashResult = await getFileStoreMap(hash);
187
268
 
188
- if(result){
189
- const exists = await urlExists(result?.url);
190
- const gcsExists = await gcsUrlExists(result?.gcs);
269
+ if(hashResult){
270
+ context.log(`File exists in map: ${hash}`);
271
+ const exists = await urlExists(hashResult?.url);
191
272
 
192
- if(!exists || !gcsExists){
273
+ if(!exists.valid){
274
+ context.log(`File is not in storage. Removing from map: ${hash}`);
193
275
  await removeFromFileStoreMap(hash);
276
+ context.res = {
277
+ status: 404,
278
+ body: `Hash ${hash} not found`
279
+ };
194
280
  return;
195
281
  }
196
282
 
197
- context.log(`Hash exists: ${hash}`);
283
+ if (gcs) {
284
+ const gcsExists = await gcsUrlExists(hashResult?.gcs);
285
+ if(!gcsExists){
286
+ hashResult = await ensureGCSUpload(context, hashResult);
287
+ }
288
+ }
289
+
198
290
  //update redis timestamp with current time
199
- await setFileStoreMap(hash, result);
291
+ await setFileStoreMap(hash, hashResult);
292
+
293
+ context.res = {
294
+ status: 200,
295
+ body: hashResult
296
+ };
297
+ return;
200
298
  }
299
+
201
300
  context.res = {
202
- body: result
301
+ status: 404,
302
+ body: `Hash ${hash} not found`
203
303
  };
204
304
  return;
205
305
  }
206
306
 
207
307
  if (req.method.toLowerCase() === `post`) {
208
- const { useGoogle } = req.body?.params || req.query;
209
- const { url } = await uploadBlob(context, req, !useAzure, useGoogle, null, hash);
210
- context.log(`File url: ${url}`);
211
- if(hash && context?.res?.body){ //save hash after upload
308
+ await uploadBlob(context, req, !useAzure, null, hash);
309
+ if(hash && context?.res?.body){
212
310
  await setFileStoreMap(hash, context.res.body);
213
311
  }
214
312
  return
@@ -227,8 +325,6 @@ async function main(context, req) {
227
325
  let numberOfChunks;
228
326
 
229
327
  let file = ensureEncoded(uri); // encode url to handle special characters
230
- let folder;
231
- const isYoutubeUrl = isValidYoutubeUrl(uri);
232
328
 
233
329
  const result = [];
234
330
 
@@ -238,20 +334,24 @@ async function main(context, req) {
238
334
  await publishRequestProgress({ requestId, progress, completedCount, totalCount, numberOfChunks, data });
239
335
  }
240
336
 
241
- const isDocument = DOC_EXTENSIONS.some(ext => uri.toLowerCase().endsWith(ext));
242
-
243
337
  try {
244
- if (isDocument) {
245
- const extension = path.extname(uri).toLowerCase();
246
- const file = path.join(os.tmpdir(), `${uuidv4()}${extension}`);
247
- await downloadFile(uri, file)
248
- const text = await documentToText(file);
338
+ // Parse URL and get pathname without query parameters for extension check
339
+ const urlObj = new URL(uri);
340
+ const pathWithoutQuery = urlObj.pathname;
341
+
342
+ if (DOC_EXTENSIONS.some(ext => pathWithoutQuery.toLowerCase().endsWith(ext))) {
343
+ const extension = path.extname(pathWithoutQuery).toLowerCase();
344
+ const tempDir = path.join(os.tmpdir(), `${uuidv4()}`);
345
+ fs.mkdirSync(tempDir);
346
+ const downloadedFile = path.join(tempDir, `${uuidv4()}${extension}`);
347
+ await downloadFile(uri, downloadedFile);
348
+ const text = await documentToText(downloadedFile);
249
349
  let tmpPath;
250
350
 
251
- try{
351
+ try {
252
352
  if (save) {
253
353
  const fileName = `${uuidv4()}.txt`; // generate unique file name
254
- const filePath = path.join(os.tmpdir(), fileName);
354
+ const filePath = path.join(tempDir, fileName);
255
355
  tmpPath = filePath;
256
356
  fs.writeFileSync(filePath, text); // write text to file
257
357
 
@@ -262,79 +362,73 @@ async function main(context, req) {
262
362
  } else {
263
363
  result.push(...easyChunker(text));
264
364
  }
265
- }catch(err){
365
+ } catch(err) {
266
366
  console.log(`Error saving file ${uri} with request id ${requestId}:`, err);
267
- }finally{
268
- try{
367
+ } finally {
368
+ try {
269
369
  // delete temporary files
270
370
  tmpPath && fs.unlinkSync(tmpPath);
271
- file && fs.unlinkSync(file);
272
- console.log(`Cleaned temp files ${tmpPath}, ${file}`);
273
- }catch(err){
274
- console.log(`Error cleaning temp files ${tmpPath}, ${file}:`, err);
371
+ downloadedFile && fs.unlinkSync(downloadedFile);
372
+ console.log(`Cleaned temp files ${tmpPath}, ${downloadedFile}`);
373
+ } catch(err) {
374
+ console.log(`Error cleaning temp files ${tmpPath}, ${downloadedFile}:`, err);
275
375
  }
276
376
 
277
- try{
377
+ try {
278
378
  //delete uploaded prev nontext file
279
- //check cleanup for whisper temp uploaded files url
280
- const regex = /whispertempfiles\/([a-z0-9-]+)/;
379
+ //check cleanup for uploaded files url
380
+ const regex = new RegExp(`${AZURE_STORAGE_CONTAINER_NAME}/([a-z0-9-]+)`);
281
381
  const match = uri.match(regex);
282
382
  if (match && match[1]) {
283
383
  const extractedValue = match[1];
284
384
  useAzure ? await deleteBlob(extractedValue) : await deleteFolder(extractedValue);
285
385
  console.log(`Cleaned temp file ${uri} with request id ${extractedValue}`);
286
386
  }
287
- }catch(err){
387
+ } catch(err) {
288
388
  console.log(`Error cleaning temp file ${uri}:`, err);
289
389
  }
290
390
  }
291
- }else{
292
-
293
- if (isYoutubeUrl) {
294
- // totalCount += 1; // extra 1 step for youtube download
295
- const processAsVideo = req.body?.params?.processAsVideo || req.query?.processAsVideo;
296
- file = await processYoutubeUrl(file, processAsVideo);
297
- }
298
-
391
+ } else {
299
392
  const { chunkPromises, chunkOffsets, uniqueOutputPath } = await splitMediaFile(file);
300
- folder = uniqueOutputPath;
301
393
 
302
394
  numberOfChunks = chunkPromises.length; // for progress reporting
303
395
  totalCount += chunkPromises.length * 4; // 4 steps for each chunk (download and upload)
304
- // isYoutubeUrl && sendProgress(); // send progress for youtube download after total count is calculated
305
396
 
306
397
  // sequential download of chunks
307
398
  const chunks = [];
308
399
  for (const chunkPromise of chunkPromises) {
309
- chunks.push(await chunkPromise);
310
- sendProgress();
400
+ const chunkPath = await chunkPromise;
401
+ chunks.push(chunkPath);
402
+ await sendProgress();
311
403
  }
312
404
 
313
405
  // sequential processing of chunks
314
406
  for (let index = 0; index < chunks.length; index++) {
315
- const chunk = chunks[index];
316
- const blobName = useAzure ? await saveFileToBlob(chunk, requestId) : await moveFileToPublicFolder(chunk, requestId);
407
+ const chunkPath = chunks[index];
408
+ const blobName = useAzure ? await saveFileToBlob(chunkPath, requestId) : await moveFileToPublicFolder(chunkPath, requestId);
317
409
  const chunkOffset = chunkOffsets[index];
318
- result.push({ uri:blobName, offset:chunkOffset });
319
- context.log(`Saved chunk as: ${blobName}`);
320
- sendProgress();
410
+ result.push({ uri: blobName, offset: chunkOffset });
411
+ console.log(`Saved chunk as: ${blobName}`);
412
+ await sendProgress();
321
413
  }
322
414
 
323
- // parallel processing, dropped
324
- // result = await Promise.all(mediaSplit.chunks.map(processChunk));
415
+ // Cleanup the temp directory
416
+ try {
417
+ if (uniqueOutputPath && fs.existsSync(uniqueOutputPath)) {
418
+ fs.rmSync(uniqueOutputPath, { recursive: true });
419
+ console.log(`Cleaned temp directory: ${uniqueOutputPath}`);
420
+ }
421
+ } catch (err) {
422
+ console.log(`Error cleaning temp directory ${uniqueOutputPath}:`, err);
423
+ }
325
424
  }
326
425
  } catch (error) {
327
426
  console.error("An error occurred:", error);
328
- context.res.status(500);
329
- context.res.body = error.message || error;
427
+ context.res = {
428
+ status: 500,
429
+ body: error.message || error
430
+ };
330
431
  return;
331
- } finally {
332
- try {
333
- (isYoutubeUrl) && (await deleteTempPath(file));
334
- folder && (await deleteTempPath(folder));
335
- } catch (error) {
336
- console.error("An error occurred while deleting:", error);
337
- }
338
432
  }
339
433
 
340
434
  console.log('result:', result.map(item =>
@@ -344,8 +438,6 @@ async function main(context, req) {
344
438
  context.res = {
345
439
  body: result
346
440
  };
347
-
348
441
  }
349
442
 
350
-
351
- export default main;
443
+ export default CortexFileHandler;
@@ -25,13 +25,31 @@ async function moveFileToPublicFolder(chunkPath, requestId) {
25
25
  async function deleteFolder(requestId) {
26
26
  if (!requestId) throw new Error('Missing requestId parameter');
27
27
  const targetFolder = join(publicFolder, requestId);
28
- await fs.rm(targetFolder, { recursive: true });
29
- console.log(`Cleaned folder: ${targetFolder}`);
28
+ try {
29
+ // Check if folder exists first
30
+ const stats = await fs.stat(targetFolder);
31
+ if (stats.isDirectory()) {
32
+ // Get list of files before deleting
33
+ const files = await fs.readdir(targetFolder);
34
+ const deletedFiles = files.map(file => join(requestId, file));
35
+ // Delete the folder
36
+ await fs.rm(targetFolder, { recursive: true });
37
+ console.log(`Cleaned folder: ${targetFolder}`);
38
+ return deletedFiles;
39
+ }
40
+ return [];
41
+ } catch (error) {
42
+ if (error.code === 'ENOENT') {
43
+ // Folder doesn't exist, return empty array
44
+ return [];
45
+ }
46
+ throw error;
47
+ }
30
48
  }
31
49
 
32
50
  async function cleanupLocal(urls=null) {
51
+ const cleanedUrls = [];
33
52
  if(!urls){
34
- const cleanedUrls = []; // initialize array for holding cleaned file URLs
35
53
  try {
36
54
  // Read the directory
37
55
  const items = await fs.readdir(publicFolder);