@aj-archipelago/cortex 1.3.11 → 1.3.12

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (27)
  1. package/helper-apps/cortex-file-handler/.env.test +7 -0
  2. package/helper-apps/cortex-file-handler/.env.test.azure +6 -0
  3. package/helper-apps/cortex-file-handler/.env.test.gcs +9 -0
  4. package/helper-apps/cortex-file-handler/blobHandler.js +263 -179
  5. package/helper-apps/cortex-file-handler/constants.js +107 -0
  6. package/helper-apps/cortex-file-handler/docHelper.js +4 -1
  7. package/helper-apps/cortex-file-handler/fileChunker.js +171 -109
  8. package/helper-apps/cortex-file-handler/helper.js +39 -17
  9. package/helper-apps/cortex-file-handler/index.js +230 -138
  10. package/helper-apps/cortex-file-handler/localFileHandler.js +21 -3
  11. package/helper-apps/cortex-file-handler/package-lock.json +2622 -51
  12. package/helper-apps/cortex-file-handler/package.json +24 -4
  13. package/helper-apps/cortex-file-handler/redis.js +9 -18
  14. package/helper-apps/cortex-file-handler/scripts/setup-azure-container.js +22 -0
  15. package/helper-apps/cortex-file-handler/scripts/setup-test-containers.js +49 -0
  16. package/helper-apps/cortex-file-handler/scripts/test-azure.sh +34 -0
  17. package/helper-apps/cortex-file-handler/scripts/test-gcs.sh +49 -0
  18. package/helper-apps/cortex-file-handler/start.js +26 -4
  19. package/helper-apps/cortex-file-handler/tests/docHelper.test.js +148 -0
  20. package/helper-apps/cortex-file-handler/tests/fileChunker.test.js +322 -0
  21. package/helper-apps/cortex-file-handler/tests/start.test.js +928 -0
  22. package/package.json +1 -1
  23. package/pathways/system/entity/sys_entity_continue.js +1 -1
  24. package/pathways/system/entity/sys_entity_start.js +1 -0
  25. package/pathways/system/entity/sys_generator_video_vision.js +2 -1
  26. package/pathways/system/entity/sys_router_tool.js +6 -4
  27. package/server/plugins/openAiWhisperPlugin.js +9 -13
package/helper-apps/cortex-file-handler/.env.test
@@ -0,0 +1,7 @@
+ # Test environment configuration
+ REDIS_CONNECTION_STRING=redis://default:redispw@localhost:32768
+ #AZURE_STORAGE_CONNECTION_STRING=UseDevelopmentStorage=true
+ AZURE_STORAGE_CONTAINER_NAME=test-container
+ #GCP_SERVICE_ACCOUNT_KEY={"type":"service_account","project_id":"test-project"}
+ NODE_ENV=test
+ PORT=7072 # Different port for testing
package/helper-apps/cortex-file-handler/.env.test.azure
@@ -0,0 +1,6 @@
+ # Test environment configuration for Azure tests
+ REDIS_CONNECTION_STRING=redis://default:redispw@localhost:32768
+ AZURE_STORAGE_CONNECTION_STRING=UseDevelopmentStorage=true
+ AZURE_STORAGE_CONTAINER_NAME=test-container
+ NODE_ENV=test
+ PORT=7072 # Different port for testing
package/helper-apps/cortex-file-handler/.env.test.gcs
@@ -0,0 +1,9 @@
+ # Test environment configuration for Azure tests
+ REDIS_CONNECTION_STRING=redis://default:redispw@localhost:32768
+ GCP_SERVICE_ACCOUNT_KEY={"project_id":"test-project"}
+ STORAGE_EMULATOR_HOST=http://localhost:4443
+ GCS_BUCKETNAME=cortextempfiles
+ AZURE_STORAGE_CONNECTION_STRING=UseDevelopmentStorage=true
+ AZURE_STORAGE_CONTAINER_NAME=test-container
+ NODE_ENV=test
+ PORT=7072 # Different port for testing
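Note: the handler presumably loads whichever of these files matches the storage backend under test. A minimal sketch of that loading step, assuming the standard dotenv package; the ENV_FILE variable name is illustrative, not taken from this package:

    // Hypothetical test bootstrap: pick one of the .env.test* files.
    import dotenv from "dotenv";

    const envFile = process.env.ENV_FILE || ".env.test";
    dotenv.config({ path: envFile });
    console.log(`Loaded ${envFile}; PORT=${process.env.PORT}`);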
package/helper-apps/cortex-file-handler/blobHandler.js
@@ -13,38 +13,6 @@ import axios from "axios";
  import { publicFolder, port, ipAddress } from "./start.js";
  import mime from "mime-types";

- const IMAGE_EXTENSIONS = [
-   ".jpg",
-   ".jpeg",
-   ".png",
-   ".gif",
-   ".bmp",
-   ".webp",
-   ".tiff",
-   ".svg",
-   ".pdf"
- ];
-
- const VIDEO_EXTENSIONS = [
-   ".mp4",
-   ".webm",
-   ".ogg",
-   ".mov",
-   ".avi",
-   ".flv",
-   ".wmv",
-   ".mkv",
- ];
-
- const AUDIO_EXTENSIONS = [
-   ".mp3",
-   ".wav",
-   ".ogg",
-   ".flac",
-   ".aac",
-   ".aiff",
- ];
-
  function isBase64(str) {
    try {
      return btoa(atob(str)) == str;
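Note: the changed-files list above adds a new constants.js (+107 lines), so these extension lists were most likely centralized there rather than dropped outright. Purely illustrative, with the export names assumed to match the old identifiers:

    // Hypothetical consumer import if the lists now live in constants.js.
    import { IMAGE_EXTENSIONS, VIDEO_EXTENSIONS, AUDIO_EXTENSIONS } from "./constants.js";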
@@ -66,7 +34,7 @@ const { project_id: GCP_PROJECT_ID } = GCP_SERVICE_ACCOUNT;
  let gcs;
  if (!GCP_PROJECT_ID || !GCP_SERVICE_ACCOUNT) {
    console.warn(
-     "Google Cloud Project ID or Service Account details are missing"
+     "No Google Cloud Storage credentials provided - GCS will not be used"
    );
  } else {
    try {
@@ -78,24 +46,20 @@ if (!GCP_PROJECT_ID || !GCP_SERVICE_ACCOUNT) {
      // Rest of your Google Cloud operations using gcs object
    } catch (error) {
      console.error(
-       "Provided Google Cloud Service Account details are invalid: ",
+       "Google Cloud Storage credentials are invalid - GCS will not be used: ",
        error
      );
    }
  }

- const GCS_BUCKETNAME = process.env.GCS_BUCKETNAME || "cortextempfiles";
-
+ export const AZURE_STORAGE_CONTAINER_NAME = process.env.AZURE_STORAGE_CONTAINER_NAME || "whispertempfiles";
+ export const GCS_BUCKETNAME = process.env.GCS_BUCKETNAME || "cortextempfiles";

- async function gcsUrlExists(url, defaultReturn = true) {
+ async function gcsUrlExists(url, defaultReturn = false) {
    try {
-     if(!url) {
+     if(!url || !gcs) {
        return defaultReturn; // Cannot check return
      }
-     if (!gcs) {
-       console.warn('GCS environment variables are not set. Unable to check if URL exists in GCS.');
-       return defaultReturn; // Cannot check return
-     }

      const urlParts = url.replace('gs://', '').split('/');
      const bucketName = urlParts[0];
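Note: gcsUrlExists now fails closed (defaultReturn = false) when the URL or the client is missing. A self-contained sketch of the same gs:// existence check; the SDK calls are the real @google-cloud/storage API, the function name is illustrative:

    // Split "gs://bucket/path/to/object" into bucket and object name,
    // then use file.exists(), which resolves to a one-element array.
    import { Storage } from "@google-cloud/storage";

    async function gcsObjectExists(gcsUrl, storage = new Storage()) {
      const [bucketName, ...rest] = gcsUrl.replace("gs://", "").split("/");
      const [found] = await storage.bucket(bucketName).file(rest.join("/")).exists();
      return found;
    }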
@@ -115,7 +79,7 @@ async function gcsUrlExists(url, defaultReturn = true) {

  const getBlobClient = async () => {
    const connectionString = process.env.AZURE_STORAGE_CONNECTION_STRING;
-   const containerName = process.env.AZURE_STORAGE_CONTAINER_NAME;
+   const containerName = AZURE_STORAGE_CONTAINER_NAME;
    if (!connectionString || !containerName) {
      throw new Error(
        "Missing Azure Storage connection string or container name environment variable"
@@ -175,24 +139,28 @@ const generateSASToken = (containerClient, blobName, expiryTimeSeconds =
  async function deleteBlob(requestId) {
    if (!requestId) throw new Error("Missing requestId parameter");
    const { containerClient } = await getBlobClient();
-   // List the blobs in the container with the specified prefix
-   const blobs = containerClient.listBlobsFlat({ prefix: `${requestId}/` });
+   // List all blobs in the container
+   const blobs = containerClient.listBlobsFlat();

    const result = [];
    // Iterate through the blobs
    for await (const blob of blobs) {
-     // Delete the matching blob
-     const blockBlobClient = containerClient.getBlockBlobClient(blob.name);
-     await blockBlobClient.delete();
-     console.log(`Cleaned blob: ${blob.name}`);
-     result.push(blob.name);
+     // Check if the blob name starts with requestId_ (flat structure)
+     // or is inside a folder named requestId/ (folder structure)
+     if (blob.name.startsWith(`${requestId}_`) || blob.name.startsWith(`${requestId}/`)) {
+       // Delete the matching blob
+       const blockBlobClient = containerClient.getBlockBlobClient(blob.name);
+       await blockBlobClient.delete();
+       console.log(`Cleaned blob: ${blob.name}`);
+       result.push(blob.name);
+     }
    }

    return result;
  }

- async function uploadBlob(context, req, saveToLocal = false, useGoogle = false, filePath=null, hash=null) {
-   return new Promise((resolve, reject) => {
+ async function uploadBlob(context, req, saveToLocal = false, filePath=null, hash=null) {
+   return new Promise(async (resolve, reject) => {
      try {
        let requestId = uuidv4();
        let body = {};
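Note: deleteBlob now scans every blob and filters by name instead of relying on a prefix listing, so it catches both naming schemes the handler produces. The matching rule, pulled out as a standalone predicate (the function name is illustrative):

    // True for "<requestId>_<file>" (flat) and "<requestId>/<file>" (folder) names.
    function belongsToRequest(blobName, requestId) {
      return blobName.startsWith(`${requestId}_`) || blobName.startsWith(`${requestId}/`);
    }

    // belongsToRequest("abc123_report.pdf", "abc123")  === true
    // belongsToRequest("abc123/report.pdf", "abc123")  === true
    // belongsToRequest("abc1234_other.pdf", "abc123")  === false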
@@ -201,186 +169,259 @@ async function uploadBlob(context, req, saveToLocal = false,
        if (filePath) {
          const file = fs.createReadStream(filePath);
          const filename = path.basename(filePath);
-         uploadFile(context, requestId, body, saveToLocal, useGoogle, file, filename, resolve, hash);
+         try {
+           const result = await uploadFile(context, requestId, body, saveToLocal, file, filename, resolve, hash);
+           resolve(result);
+         } catch (error) {
+           const err = new Error("Error processing file upload.");
+           err.status = 500;
+           throw err;
+         }
        } else {
          // Otherwise, continue working with form-data
          const busboy = Busboy({ headers: req.headers });
+         let hasFile = false;
+         let errorOccurred = false;

          busboy.on("field", (fieldname, value) => {
            if (fieldname === "requestId") {
              requestId = value;
-           } else if (fieldname === "useGoogle") {
-             useGoogle = value;
            }
          });

          busboy.on("file", async (fieldname, file, filename) => {
-           uploadFile(context, requestId, body, saveToLocal, useGoogle, file, filename?.filename || filename, resolve, hash);
+           if (errorOccurred) return;
+           hasFile = true;
+           uploadFile(context, requestId, body, saveToLocal, file, filename?.filename || filename, resolve, hash).catch(error => {
+             if (errorOccurred) return;
+             errorOccurred = true;
+             const err = new Error("Error processing file upload.");
+             err.status = 500;
+             reject(err);
+           });
          });

          busboy.on("error", (error) => {
-           context.log.error("Error processing file upload:", error);
-           context.res = {
-             status: 500,
-             body: "Error processing file upload.",
-           };
-           reject(error); // Reject the promise
+           if (errorOccurred) return;
+           errorOccurred = true;
+           const err = new Error("No file provided in request");
+           err.status = 400;
+           reject(err);
+         });
+
+         busboy.on("finish", () => {
+           if (errorOccurred) return;
+           if (!hasFile) {
+             errorOccurred = true;
+             const err = new Error("No file provided in request");
+             err.status = 400;
+             reject(err);
+           }
+         });
+
+         // Handle errors from piping the request
+         req.on('error', (error) => {
+           if (errorOccurred) return;
+           errorOccurred = true;
+           // Only log unexpected errors
+           if (error.message !== "No file provided in request") {
+             context.log("Error in request stream:", error);
+           }
+           const err = new Error("No file provided in request");
+           err.status = 400;
+           reject(err);
          });

-         req.pipe(busboy);
+         try {
+           req.pipe(busboy);
+         } catch (error) {
+           if (errorOccurred) return;
+           errorOccurred = true;
+           // Only log unexpected errors
+           if (error.message !== "No file provided in request") {
+             context.log("Error piping request to busboy:", error);
+           }
+           const err = new Error("No file provided in request");
+           err.status = 400;
+           reject(err);
+         }
        }
      } catch (error) {
-       context.log.error("Error processing file upload:", error);
-       context.res = {
-         status: 500,
-         body: "Error processing file upload.",
-       };
-       reject(error); // Reject the promise
+       // Only log unexpected errors
+       if (error.message !== "No file provided in request") {
+         context.log("Error processing file upload:", error);
+       }
+       const err = new Error(error.message || "Error processing file upload.");
+       err.status = error.status || 500;
+       reject(err);
      }
    });
  }

- async function uploadFile(context, requestId, body, saveToLocal, useGoogle, file, filename, resolve, hash=null) {
-   // do not use Google if the file is not an image or video
-   const ext = path.extname(filename).toLowerCase();
-   const canUseGoogle = IMAGE_EXTENSIONS.includes(ext) || VIDEO_EXTENSIONS.includes(ext) || AUDIO_EXTENSIONS.includes(ext);
-   if (!canUseGoogle) {
-     useGoogle = false;
-   }
-
-   // check if useGoogle is set but no gcs and warn
-   if (useGoogle && useGoogle !== "false" && !gcs) {
-     context.log.warn("Google Cloud Storage is not initialized reverting google upload ");
-     useGoogle = false;
-   }
-
-   const encodedFilename = encodeURIComponent(`${requestId || uuidv4()}_${filename}`);
-
-
-   if (saveToLocal) {
-     // create the target folder if it doesn't exist
-     const localPath = join(publicFolder, requestId);
-     fs.mkdirSync(localPath, { recursive: true });
-
-     const destinationPath = `${localPath}/${encodedFilename}`;
-
-     await pipeline(file, fs.createWriteStream(destinationPath));
+ // Helper function to handle local file storage
+ async function saveToLocalStorage(context, requestId, encodedFilename, file) {
+   const localPath = join(publicFolder, requestId);
+   fs.mkdirSync(localPath, { recursive: true });
+   const destinationPath = `${localPath}/${encodedFilename}`;
+   context.log(`Saving to local storage... ${destinationPath}`);
+   await pipeline(file, fs.createWriteStream(destinationPath));
+   return `http://${ipAddress}:${port}/files/${requestId}/${encodedFilename}`;
+ }

-     const message = `File '${encodedFilename}' saved to folder successfully.`;
-     context.log(message);
+ // Helper function to handle Azure blob storage
+ async function saveToAzureStorage(context, encodedFilename, file) {
+   const { containerClient } = await getBlobClient();
+   const contentType = mime.lookup(encodedFilename);
+   const options = contentType ? { blobHTTPHeaders: { blobContentType: contentType } } : {};
+
+   const blockBlobClient = containerClient.getBlockBlobClient(encodedFilename);
+
+   context.log(`Uploading to Azure... ${encodedFilename}`);
+   await blockBlobClient.uploadStream(file, undefined, undefined, options);
+   const sasToken = generateSASToken(containerClient, encodedFilename);
+   return `${blockBlobClient.url}?${sasToken}`;
+ }

-     const url = `http://${ipAddress}:${port}/files/${requestId}/${encodedFilename}`;
+ // Helper function to upload a file to Google Cloud Storage
+ async function uploadToGCS(context, file, encodedFilename) {
+   const gcsFile = gcs.bucket(GCS_BUCKETNAME).file(encodedFilename);
+   const writeStream = gcsFile.createWriteStream();
+
+   context.log(`Uploading to GCS... ${encodedFilename}`);
+
+   await pipeline(file, writeStream);
+   return `gs://${GCS_BUCKETNAME}/${encodedFilename}`;
+ }

-     body = { message, url };
+ // Helper function to handle Google Cloud Storage
+ async function saveToGoogleStorage(context, encodedFilename, file) {
+   if (!gcs) {
+     throw new Error('Google Cloud Storage is not initialized');
+   }

-     resolve(body); // Resolve the promise
-   } else {
-     const { containerClient } = await getBlobClient();
+   return uploadToGCS(context, file, encodedFilename);
+ }

-     const contentType = mime.lookup(encodedFilename); // content type based on file extension
-     const options = {};
-     if (contentType) {
-       options.blobHTTPHeaders = { blobContentType: contentType };
+ async function uploadFile(context, requestId, body, saveToLocal, file, filename, resolve, hash = null) {
+   try {
+     if (!file) {
+       context.res = {
+         status: 400,
+         body: 'No file provided in request'
+       };
+       resolve(context.res);
+       return;
      }

-     const blockBlobClient = containerClient.getBlockBlobClient(encodedFilename);
-
-     const passThroughStream = new PassThrough();
-     file.pipe(passThroughStream);
-
-     await blockBlobClient.uploadStream(passThroughStream, undefined, undefined, options);
-
-     const message = `File '${encodedFilename}' uploaded successfully.`;
-     context.log(message);
-     const sasToken = generateSASToken(containerClient, encodedFilename);
-     const url = `${blockBlobClient.url}?${sasToken}`;
-     body = { message, url };
-   }
-
-   context.res = {
-     status: 200,
-     body,
-   };
-
-   if (useGoogle && useGoogle !== "false") {
-     const { url } = body;
-     const gcsFile = gcs.bucket(GCS_BUCKETNAME).file(encodedFilename);
-     const writeStream = gcsFile.createWriteStream();
+     const encodedFilename = encodeURIComponent(`${requestId || uuidv4()}_${filename}`);

-     const response = await axios({
-       method: "get",
-       url: url,
-       responseType: "stream",
+     // Create duplicate readable streams for parallel uploads
+     const streams = [];
+     if (gcs) {
+       streams.push(new PassThrough());
+     }
+     streams.push(new PassThrough());
+
+     // Pipe the input file to all streams
+     streams.forEach(stream => {
+       file.pipe(stream);
      });
+
+     // Set up storage promises
+     const storagePromises = [];
+     const primaryPromise = saveToLocal
+       ? saveToLocalStorage(context, requestId, encodedFilename, streams[streams.length - 1])
+       : saveToAzureStorage(context, encodedFilename, streams[streams.length - 1]);

-     // // Get the total file size from the response headers
-     // const totalSize = Number(response.headers["content-length"]);
-     // let downloadedSize = 0;
-
-     // // Listen to the 'data' event to track the progress
-     // response.data.on("data", (chunk) => {
-     //   downloadedSize += chunk.length;
-
-     //   // Calculate and display the progress
-     //   const progress = (downloadedSize / totalSize) * 100;
-     //   console.log(`Progress gsc of ${encodedFilename}: ${progress.toFixed(2)}%`);
-     // });
-
-     // Pipe the Axios response stream directly into the GCS Write Stream
-     response.data.pipe(writeStream);
+     storagePromises.push(primaryPromise.then(url => ({ url, type: 'primary' })));
+
+     // Add GCS promise if configured - now uses its own stream
+     if (gcs) {
+       storagePromises.push(
+         saveToGoogleStorage(context, encodedFilename, streams[0])
+           .then(gcsUrl => ({ gcs: gcsUrl, type: 'gcs' }))
+       );
+     }
+
+     // Wait for all storage operations to complete
+     const results = await Promise.all(storagePromises);

-     await new Promise((resolve, reject) => {
-       writeStream.on("finish", resolve);
-       writeStream.on("error", reject);
-     });
+     // Combine results
+     const result = {
+       message: `File '${encodedFilename}' ${saveToLocal ? 'saved to folder' : 'uploaded'} successfully.`,
+       filename,
+       ...results.reduce((acc, result) => {
+         if (result.type === 'primary') acc.url = result.url;
+         if (result.type === 'gcs') acc.gcs = result.gcs;
+         return acc;
+       }, {})
+     };
+
+     if (hash) {
+       result.hash = hash;
+     }

-     body.gcs = `gs://${GCS_BUCKETNAME}/${encodedFilename}`;
-   }
-
-   if(!body.filename) {
-     body.filename = filename;
-   }
-   if(hash && !body.hash) {
-     body.hash = hash;
+     context.res = {
+       status: 200,
+       body: result,
+     };
+
+     resolve(result);
+   } catch (error) {
+     context.log("Error in uploadFile:", error);
+     if (body.url) {
+       try {
+         await cleanup(context, [body.url]);
+       } catch (cleanupError) {
+         context.log("Error during cleanup after failure:", cleanupError);
+       }
+     }
+     throw error;
    }
-   resolve(body); // Resolve the promise
  }

  // Function to delete files that haven't been used in more than a month
- async function cleanup(urls=null) {
+ async function cleanup(context, urls=null) {
    const { containerClient } = await getBlobClient();
+   const cleanedURLs = [];

    if(!urls) {
      const xMonthAgo = new Date();
      xMonthAgo.setMonth(xMonthAgo.getMonth() - 1);

      const blobs = containerClient.listBlobsFlat();
-     const cleanedURLs = [];

      for await (const blob of blobs) {
        const lastModified = blob.properties.lastModified;
        if (lastModified < xMonthAgo) {
-         const blockBlobClient = containerClient.getBlockBlobClient(blob.name);
-         await blockBlobClient.delete();
-         console.log(`Cleaned blob: ${blob.name}`);
-         cleanedURLs.push(blob.name);
+         try {
+           const blockBlobClient = containerClient.getBlockBlobClient(blob.name);
+           await blockBlobClient.delete();
+           context.log(`Cleaned blob: ${blob.name}`);
+           cleanedURLs.push(blob.name);
+         } catch (error) {
+           if (error.statusCode !== 404) { // Ignore "not found" errors
+             context.log(`Error cleaning blob ${blob.name}:`, error);
+           }
+         }
        }
      }
-
-     return cleanedURLs;
-   }else{
-     // Delete the blobs with the specified URLs
-     const cleanedURLs = [];
+   } else {
      for(const url of urls) {
-       // Remove the base url to get the blob name
-       const blobName = url.replace(containerClient.url, '');
-       const blockBlobClient = containerClient.getBlockBlobClient(blobName);
-       await blockBlobClient.delete();
-       console.log(`Cleaned blob: ${blobName}`);
-       cleanedURLs.push(blobName);
+       try {
+         const blobName = url.replace(containerClient.url, '');
+         const blockBlobClient = containerClient.getBlockBlobClient(blobName);
+         await blockBlobClient.delete();
+         context.log(`Cleaned blob: ${blobName}`);
+         cleanedURLs.push(blobName);
+       } catch (error) {
+         if (error.statusCode !== 404) { // Ignore "not found" errors
+           context.log(`Error cleaning blob ${url}:`, error);
+         }
+       }
      }
-     return cleanedURLs;
    }
+   return cleanedURLs;
  }

  async function cleanupGCS(urls=null) {
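Note: the core of the uploadFile rewrite is fanning one incoming stream out to several storage backends through PassThrough streams, then awaiting all uploads with Promise.all. A self-contained sketch of that pattern using only Node core modules; fanOut and the file names are illustrative:

    import fs from "fs";
    import { PassThrough } from "stream";
    import { pipeline } from "stream/promises";

    async function fanOut(source, destinations) {
      // One PassThrough per destination so each consumer reads independently.
      const branches = destinations.map(() => new PassThrough());
      branches.forEach((branch) => source.pipe(branch));
      // Run all writes in parallel; reject if any branch fails.
      return Promise.all(destinations.map((dest, i) => pipeline(branches[i], dest)));
    }

    // await fanOut(fs.createReadStream("in.bin"),
    //              [fs.createWriteStream("a.bin"), fs.createWriteStream("b.bin")]);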
@@ -432,4 +473,47 @@
    return cleanedURLs;
  }

- export { saveFileToBlob, deleteBlob, uploadBlob, cleanup, cleanupGCS, gcsUrlExists };
+ async function deleteGCS(blobName) {
+   if (!blobName) throw new Error("Missing blobName parameter");
+   if (!gcs) throw new Error("Google Cloud Storage is not initialized");
+
+   try {
+     if (process.env.STORAGE_EMULATOR_HOST) {
+       // For fake GCS server, use HTTP API directly
+       const response = await axios.delete(
+         `http://localhost:4443/storage/v1/b/${GCS_BUCKETNAME}/o/${encodeURIComponent(blobName)}`,
+         { validateStatus: status => status === 200 || status === 404 }
+       );
+       if (response.status === 200) {
+         console.log(`Cleaned GCS file: ${blobName}`);
+         return [blobName];
+       }
+       return [];
+     } else {
+       // For real GCS, use the SDK
+       const bucket = gcs.bucket(GCS_BUCKETNAME);
+       const file = bucket.file(blobName);
+       await file.delete();
+       console.log(`Cleaned GCS file: ${blobName}`);
+       return [blobName];
+     }
+   } catch (error) {
+     if (error.code !== 404) {
+       console.error(`Error in deleteGCS: ${error}`);
+       throw error;
+     }
+     return [];
+   }
+ }
+
+ // Helper function to ensure GCS upload for existing files
+ async function ensureGCSUpload(context, existingFile) {
+   if (!existingFile.gcs && gcs) {
+     context.log(`GCS file was missing - uploading.`);
+     const encodedFilename = path.basename(existingFile.url.split('?')[0]);
+     existingFile.gcs = await uploadToGCS(context, existingFile.url, encodedFilename);
+   }
+   return existingFile;
+ }
+
+ export { saveFileToBlob, deleteBlob, deleteGCS, uploadBlob, cleanup, cleanupGCS, gcsUrlExists, ensureGCSUpload, gcs };
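Note: deleteGCS branches on STORAGE_EMULATOR_HOST so tests can run against a local fake GCS server (the .env.test.gcs file above points it at http://localhost:4443) while production uses the SDK. A hedged usage sketch; `context` and `staleUrls` are stand-ins for the caller's values, not part of this module:

    import { cleanup, deleteGCS } from "./blobHandler.js";

    // Clean Azure blobs first, then remove their GCS mirrors; deleteGCS
    // resolves to the deleted names, or [] when the object was already gone.
    const cleaned = await cleanup(context, staleUrls);
    await Promise.all(cleaned.map((name) => deleteGCS(name)));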